We are provided with a training set and a test set of images of dogs. Each image has a filename that is its unique id. The dataset comprises 120 breeds of dogs. The goal is to create a classifier capable of determining a dog's breed from a photo. The list of breeds is as follows
In this task, we were provided a strictly canine subset of ImageNet in order to practice fine-grained image categorization. How well can we tell our Norfolk Terriers from our Norwich Terriers? With 120 breeds of dogs and a limited number of training images per class, we might find the problem more, err, ruff than we anticipated.
import os
from os.path import join
from os import listdir
import random
import cv2
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from jupyterthemes import jtplot
plt.style.use('fivethirtyeight')
plt.rc_context({'text.color': 'white', 'legend.facecolor': 'black', 'axes.edgecolor':'white', 'axes.labelcolor': 'white', 'xtick.color':'white', 'ytick.color':'white', 'figure.facecolor':'white'})
%matplotlib inline
def display_image(path, label=None):
    """Render the image at *path* inline in the notebook as base64-embedded HTML.

    When *label* is given, the image is shown at 200x200 with the label
    centered above it; otherwise the image is embedded at native size.
    """
    import base64
    import io
    from IPython.core.display import display, HTML
    from PIL import Image as PILImage

    # Re-encode the image as PNG in memory so any input format can be embedded.
    buffer = io.BytesIO()
    PILImage.open(path).save(buffer, format='PNG')
    b64_data = base64.b64encode(buffer.getvalue()).decode()
    if label is None:
        markup = '<img src="data:image/png;base64,{}"/>'.format(b64_data)
    else:
        markup = ('<div style="text-align: center;">{}'
                  '<img src="data:image/png;base64,{}" width="200" height="200"/></div>').format(label, b64_data)
    display(HTML(markup))
# Sanity check: list every device TensorFlow can see (CPU/GPU and XLA variants).
tf.config.list_physical_devices(device_type=None)
[PhysicalDevice(name='/physical_device:CPU:0', device_type='CPU'), PhysicalDevice(name='/physical_device:XLA_CPU:0', device_type='XLA_CPU'), PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU'), PhysicalDevice(name='/physical_device:XLA_GPU:0', device_type='XLA_GPU')]
# Resolve the dataset layout: data/dog-breed/{train,test} under the working dir.
data_dir = os.getcwd() + "/data/dog-breed/"
train_dir = join(data_dir, 'train')
test_dir = join(data_dir, 'test')
print(f'Train samples directory path: {train_dir}')
print(f'Test samples directory path: {test_dir}')
# Counting directory entries gives the number of images per split
# (every entry in these folders is an image file).
train_count = len(listdir(train_dir))
test_count = len(listdir(test_dir))
print(f'\n\nTrain dataset size: {train_count}')
print(f'Test dataset size (unlabeled): {test_count}')
Train samples directory path: C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/train Test samples directory path: C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/test Train dataset size: 10222 Test dataset size (unlabeled): 10357
# Load the id -> breed table; ids are bare hashes in the CSV, so append the
# .jpg extension to match the actual filenames on disk.
labeled_data = pd.read_csv(join(data_dir, 'labels.csv'))
labeled_data['id'] = labeled_data['id'].apply(lambda name: f"{name}.jpg")
# Create dictionaries from label to index and the opposite
label_to_index = {label:index for index, label in enumerate(labeled_data['breed'].unique())}
index_to_label = {index:label for index, label in enumerate(labeled_data['breed'].unique())}
print("Display top 10 samples from the labeled train data")
labeled_data.head(10)
Display top 10 samples from the labeled train data
| id | breed | |
|---|---|---|
| 0 | 000bec180eb18c7604dcecc8fe0dba07.jpg | boston_bull |
| 1 | 001513dfcb2ffafc82cccf4d8bbaba97.jpg | dingo |
| 2 | 001cdf01b096e06d78e9e5112d419397.jpg | pekinese |
| 3 | 00214f311d5d2247d5dfe4fe24b2303d.jpg | bluetick |
| 4 | 0021f9ceb3235effd7fcde7f7538ed62.jpg | golden_retriever |
| 5 | 002211c81b498ef88e1b40b9abf84e1d.jpg | bedlington_terrier |
| 6 | 00290d3e1fdd27226ba27a8ce248ce85.jpg | bedlington_terrier |
| 7 | 002a283a315af96eaea0e28e7163b21b.jpg | borzoi |
| 8 | 003df8b8a8b05244b1d920bb6cf451f9.jpg | basenji |
| 9 | 0042188c895a2f14ef64a918ed9c7b64.jpg | scottish_deerhound |
# Extract the target column and summarize the label space (120 breeds).
Y_train = labeled_data['breed']
labels = Y_train.unique()
print(f'Labels: \n {labels}')
label_count = len(labels)
print(f'\n\nTotal labeled data: {len(Y_train)}')
print(f"Total classes: {label_count}")
# Target (height, width) every image will be resized to before training.
images_shape = (128, 128)
Labels: ['boston_bull' 'dingo' 'pekinese' 'bluetick' 'golden_retriever' 'bedlington_terrier' 'borzoi' 'basenji' 'scottish_deerhound' 'shetland_sheepdog' 'walker_hound' 'maltese_dog' 'norfolk_terrier' 'african_hunting_dog' 'wire-haired_fox_terrier' 'redbone' 'lakeland_terrier' 'boxer' 'doberman' 'otterhound' 'standard_schnauzer' 'irish_water_spaniel' 'black-and-tan_coonhound' 'cairn' 'affenpinscher' 'labrador_retriever' 'ibizan_hound' 'english_setter' 'weimaraner' 'giant_schnauzer' 'groenendael' 'dhole' 'toy_poodle' 'border_terrier' 'tibetan_terrier' 'norwegian_elkhound' 'shih-tzu' 'irish_terrier' 'kuvasz' 'german_shepherd' 'greater_swiss_mountain_dog' 'basset' 'australian_terrier' 'schipperke' 'rhodesian_ridgeback' 'irish_setter' 'appenzeller' 'bloodhound' 'samoyed' 'miniature_schnauzer' 'brittany_spaniel' 'kelpie' 'papillon' 'border_collie' 'entlebucher' 'collie' 'malamute' 'welsh_springer_spaniel' 'chihuahua' 'saluki' 'pug' 'malinois' 'komondor' 'airedale' 'leonberg' 'mexican_hairless' 'bull_mastiff' 'bernese_mountain_dog' 'american_staffordshire_terrier' 'lhasa' 'cardigan' 'italian_greyhound' 'clumber' 'scotch_terrier' 'afghan_hound' 'old_english_sheepdog' 'saint_bernard' 'miniature_pinscher' 'eskimo_dog' 'irish_wolfhound' 'brabancon_griffon' 'toy_terrier' 'chow' 'flat-coated_retriever' 'norwich_terrier' 'soft-coated_wheaten_terrier' 'staffordshire_bullterrier' 'english_foxhound' 'gordon_setter' 'siberian_husky' 'newfoundland' 'briard' 'chesapeake_bay_retriever' 'dandie_dinmont' 'great_pyrenees' 'beagle' 'vizsla' 'west_highland_white_terrier' 'kerry_blue_terrier' 'whippet' 'sealyham_terrier' 'standard_poodle' 'keeshond' 'japanese_spaniel' 'miniature_poodle' 'pomeranian' 'curly-coated_retriever' 'yorkshire_terrier' 'pembroke' 'great_dane' 'blenheim_spaniel' 'silky_terrier' 'sussex_spaniel' 'german_short-haired_pointer' 'french_bulldog' 'bouvier_des_flandres' 'tibetan_mastiff' 'english_springer' 'cocker_spaniel' 'rottweiler'] Total labeled data: 10222 Total 
classes: 120
# Per-breed sample counts, used below to gauge class imbalance.
label_sample_count = labeled_data.groupby(['breed']).agg(['count'])
label_sample_count.head(10)
| id | |
|---|---|
| count | |
| breed | |
| affenpinscher | 80 |
| afghan_hound | 116 |
| african_hunting_dog | 86 |
| airedale | 107 |
| american_staffordshire_terrier | 74 |
| appenzeller | 78 |
| australian_terrier | 102 |
| basenji | 110 |
| basset | 82 |
| beagle | 105 |
print("Mean rounded train samples in each class:")
print(label_sample_count.mean(axis = 0).round())
Mean rounded train samples in each class: id count 85.0 dtype: float64
# The 10 most represented breeds.
labeled_data.groupby("breed").count().sort_values("id", ascending=False).head(10)
| id | |
|---|---|
| breed | |
| scottish_deerhound | 126 |
| maltese_dog | 117 |
| afghan_hound | 116 |
| entlebucher | 115 |
| bernese_mountain_dog | 114 |
| shih-tzu | 112 |
| great_pyrenees | 111 |
| pomeranian | 111 |
| basenji | 110 |
| samoyed | 109 |
# The 10 least represented breeds.
labeled_data.groupby("breed").count().sort_values("id", ascending=False).tail(10)
| id | |
|---|---|
| breed | |
| tibetan_mastiff | 69 |
| german_shepherd | 69 |
| giant_schnauzer | 69 |
| walker_hound | 69 |
| otterhound | 69 |
| golden_retriever | 67 |
| brabancon_griffon | 67 |
| komondor | 67 |
| briard | 66 |
| eskimo_dog | 66 |
# Visualize the per-breed sample distribution (one bin per class).
hist = labeled_data['breed'].hist(figsize=(16, 8), bins=120, xrot=90, xlabelsize=8)
print("Labeled images class distribution")
plt.show()
Labeled images class distribution
We need to overcome this by weighting our labels relative to their presence in the dataset, so the model does not become biased toward the over-represented classes.
We can see that the number of training samples per label varies. We want to understand the amount and dimensions of our image data better.
from PIL import Image
# Collect the pixel dimensions of every labeled training image so we can
# judge how much resizing to a fixed shape will distort them.
widths, heights = [], []
for filename in labeled_data['id']:
    image = Image.open(join(train_dir, filename))
    widths.append(image.size[0])   # PIL's .size is (width, height)
    heights.append(image.size[1])  # height
print('Training data average (width, height): ', (np.mean(widths), np.mean(heights)))
print('Training data (minimum width, maximum width): ', (np.min(widths), np.max(widths)))
print('Training data (minimum height, maximum height): ', (np.min(heights), np.max(heights)))
# Fix: corrected the "Standart" typo in the printed summary.
print('Training data (Standard deviation of widths, Standard deviation of heights): ', (np.std(widths), np.std(heights)))
Training data average (width, height): (443.33153981608297, 386.74721189591077) Training data (minimum width, maximum width): (97, 3264) Training data (minimum height, maximum height): (102, 2562) Training data (Standart deviation of widths, Standart deviation of heights): (152.41266408441547, 130.06196422027517)
# Plot the width and height distributions gathered above, one panel each.
fig, (ax1, ax2) = plt.subplots(2, figsize=(12, 8))
fig.suptitle('Histograms of training image widths, heights')
ax1.hist(widths, bins=100); ax1.set_xlabel('Width'); ax1.set_ylabel('No. of images')
ax2.hist(heights, bins=100); ax2.set_xlabel('Height'); ax2.set_ylabel('No. of images')
print("Labeled images widths and heights distribution")
plt.show()
Labeled images widths and heights distribution
The following articles are strongly related to the problem that we are trying to solve here.
Although they don't use the exact same dog breed dataset, they use a dog breed dataset with a label count similar to ours and mostly apply transfer learning techniques.
Dog Breed Identification - Stanford University
Dog Breed Identification - University of Waterloo
Dog Breed Identification - ResearchGate
The following tables and charts compare results obtained using different well-known CNN models on a similar dog-identification problem:
We can see the metrics for train and validation data with and without augmentation.
display_image("benchmark.png")
display_image("benchmark3.png")
display_image("benchmark2.png")
from mpl_toolkits.axes_grid1 import ImageGrid
def plot_multiple_imgs_by_class(imgs, labels, nrows=10, ncols=12, figsize=(18, 18)):
    """Show the first nrows*ncols images in a dense grid, each tagged with its label.

    Args:
        imgs: indexable collection of image arrays with 0-255 pixel values
              (rescaled to [0, 1] here for imshow).
        labels: per-image label strings, parallel to imgs.
        nrows, ncols: grid shape; exactly nrows*ncols images are drawn.
        figsize: overall figure size in inches.
    """
    fig = plt.figure(1, figsize=figsize)
    grid = ImageGrid(fig, 111, nrows_ncols=(nrows, ncols), axes_pad=0.05)
    for i in range(nrows * ncols):
        ax = grid[i]
        ax.imshow(imgs[i] / 255.)
        ax.axis('off')
        ax.text(0, 0, '%s' % labels[i], color='k', backgroundcolor='w', alpha=0.8)
    # Fix: removed the trailing plt.tight_layout() call. ImageGrid manages its
    # own axes layout, and tight_layout only emitted a UserWarning about
    # incompatible Axes (visible in the original run's output).
def read_and_resize_single_img(filename, nrow=224, ncol=224, channels=3):
    """Load one image from disk and return it as an RGB array resized to (nrow, ncol).

    Raises:
        FileNotFoundError: if OpenCV cannot read/decode the file
            (cv2.imread signals failure by returning None, not by raising).
    """
    arr = cv2.imread(filename)
    if arr is None:
        # Fail loudly here instead of crashing with a cryptic cvtColor error.
        raise FileNotFoundError(f"Could not read image: {filename}")
    # cv2.imread yields BGR channel order; convert to RGB for display/training.
    # (COLOR_BGR2RGB states the actual intent; the original COLOR_RGB2BGR
    # constant performs the identical channel swap, so behavior is unchanged.)
    arr = cv2.cvtColor(arr, cv2.COLOR_BGR2RGB)
    # Dataset images vary in size, so normalize the dimensions.
    arr = cv2.resize(arr, dsize=(nrow, ncol))
    return arr
def read_train_from_dirs(data_path, num_imgs=120, nrow=224, ncol=224, channels=3):
    """Sample num_imgs random labeled training images and return (X, labels).

    X has shape (num_imgs, nrow, ncol, channels); labels is the parallel list
    of breed names. Uses the global labeled_data DataFrame for the id->breed
    mapping. Fixes over the original:
      - the data_path argument is now actually used (it was ignored in favor
        of the global train_dir; callers pass train_dir, so behavior is the same);
      - breed lookup is one dict build instead of a full DataFrame scan per file;
      - dropped the pointless float16 round-trip (0-255 pixel values are exact
        in float16, so the stored float64 values are identical either way).
    """
    id_to_breed = dict(zip(labeled_data['id'], labeled_data['breed']))
    filenames = labeled_data['id'].tolist()
    random.shuffle(filenames)
    chosen = filenames[:num_imgs]
    X = np.zeros((num_imgs, nrow, ncol, channels))
    picked_labels = []
    for i, filename in enumerate(chosen):
        X[i] = read_and_resize_single_img(join(data_path, filename), nrow, ncol, channels)
        picked_labels.append(id_to_breed[filename])
    return X, picked_labels
# Draw a random 10x12 grid of labeled training images for visual inspection.
imgs, labels = read_train_from_dirs(data_path=train_dir)
plot_multiple_imgs_by_class(imgs, labels=labels)
C:\Users\Itay Bouganim\AppData\Roaming\Python\Python37\site-packages\ipykernel_launcher.py:11: UserWarning: This figure includes Axes that are not compatible with tight_layout, so results might be incorrect. # This is added back by InteractiveShellApp.init_path()
All of those factors and the fact that this is a relatively small dataset for 120 labels, can make the training process harder and need to be taken into account.
display_image("african.jpg", label="African Hunting Dog")
display_image("shitzu.jpg", label="Shi-Tzu")
display_image("samoyed.jpg", label="Samoyed")
display_image("great.jpg", label="Great Pyrenees")
display_image("face.jpg", label="Mainly Face")
display_image("scene.jpg", label="Mainly Scenery")
display_image("humans.jpg", label="")
display_image("obstuct.jpg", label="")
display_image("kfold.png")
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Input, MaxPool2D, Dropout, BatchNormalization, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.preprocessing.image import ImageDataGenerator
def one_hot_encode(y):
    """One-hot encode an iterable of breed-name labels.

    Args:
        y: iterable of breed names present in the global label_to_index map.

    Returns:
        Array of shape (len(y), num_classes) with a single 1 per row.

    Fix: the matrix width is len(label_to_index) (all 120 classes), not
    y.max() + 1 — the original produced a too-narrow encoding whenever a
    batch happened not to contain the highest-index breed.
    """
    indices = np.array([label_to_index[label] for label in y])
    enc = np.zeros((indices.size, len(label_to_index)))
    enc[np.arange(indices.size), indices] = 1
    return enc
def create_model(optimizer):
    """Build and compile the baseline CNN for 128x128 RGB inputs.

    Three conv + 2x2 max-pool blocks (16, 32, 48 filters) feed a 64-unit
    dense layer and a 120-way softmax head. Compiled with categorical
    cross-entropy and an accuracy metric.
    """
    inputs = Input((128, 128, 3))
    net = inputs
    # Convolutional feature extractor: conv -> 2x2 max-pool, three times.
    for n_filters in (16, 32, 48):
        net = Conv2D(n_filters, (3, 3), activation='relu')(net)
        net = MaxPool2D(pool_size=(2, 2), strides=2)(net)
    # Fully connected classifier head.
    net = Flatten()(net)
    net = Dense(64, activation='relu')(net)
    outputs = Dense(120, activation='softmax')(net)
    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model
# Inspect the baseline architecture and parameter counts.
create_model('adam').summary()
Model: "functional_219" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_110 (InputLayer) [(None, 128, 128, 3)] 0 _________________________________________________________________ conv2d_446 (Conv2D) (None, 126, 126, 16) 448 _________________________________________________________________ max_pooling2d_424 (MaxPoolin (None, 63, 63, 16) 0 _________________________________________________________________ conv2d_447 (Conv2D) (None, 61, 61, 32) 4640 _________________________________________________________________ max_pooling2d_425 (MaxPoolin (None, 30, 30, 32) 0 _________________________________________________________________ conv2d_448 (Conv2D) (None, 28, 28, 48) 13872 _________________________________________________________________ max_pooling2d_426 (MaxPoolin (None, 14, 14, 48) 0 _________________________________________________________________ flatten_109 (Flatten) (None, 9408) 0 _________________________________________________________________ dense_402 (Dense) (None, 64) 602176 _________________________________________________________________ dense_403 (Dense) (None, 120) 7800 ================================================================= Total params: 628,936 Trainable params: 628,936 Non-trainable params: 0 _________________________________________________________________
from sklearn.model_selection import KFold
from sklearn.utils import class_weight
k = 5  # number of cross-validation folds

def generate_kfold(X, Y, k=5):
    """Split the labeled data into k stratified train/validation folds.

    Args:
        X: placeholder array whose length equals len(labeled_data) (only the
           indices matter here).
        Y: per-sample breed labels used for stratification.
        k: number of folds.

    Returns:
        List of {'train': DataFrame, 'valid': DataFrame} dicts sliced from
        the global labeled_data.

    Fix: the original called KFold.split(X, Y) — plain KFold silently
    ignores Y, so the folds were not stratified despite the imbalanced
    120-class distribution (visible in the varying per-breed valid counts).
    StratifiedKFold honors Y and keeps each fold's breed proportions close
    to the full dataset's.
    """
    from sklearn.model_selection import StratifiedKFold
    kf = StratifiedKFold(n_splits=k)
    folds = []
    for idx, (train_index, val_index) in enumerate(kf.split(X, Y)):
        training_data = labeled_data.iloc[train_index]
        validation_data = labeled_data.iloc[val_index]
        folds.append({'train': training_data, 'valid': validation_data})
        print(f"Fold {idx + 1} => train count: {len(folds[idx]['train'])}, valid count: {len(folds[idx]['valid'])}")
        print(folds[idx]['valid'].groupby(['breed']).agg(['count']))
    return folds
# Build the 5 folds; X is a dummy array — only its length (= train_count) is used.
folds = generate_kfold(np.zeros(train_count), Y_train)
Fold 1 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 19
afghan_hound 21
african_hunting_dog 13
airedale 23
american_staffordshire_terrier 13
... ...
welsh_springer_spaniel 22
west_highland_white_terrier 17
whippet 22
wire-haired_fox_terrier 10
yorkshire_terrier 20
[120 rows x 1 columns]
Fold 2 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 15
afghan_hound 19
african_hunting_dog 19
airedale 23
american_staffordshire_terrier 11
... ...
welsh_springer_spaniel 17
west_highland_white_terrier 12
whippet 17
wire-haired_fox_terrier 14
yorkshire_terrier 13
[120 rows x 1 columns]
Fold 3 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 16
afghan_hound 32
african_hunting_dog 17
airedale 18
american_staffordshire_terrier 16
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 20
whippet 14
wire-haired_fox_terrier 23
yorkshire_terrier 20
[120 rows x 1 columns]
Fold 4 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 12
afghan_hound 22
african_hunting_dog 16
airedale 23
american_staffordshire_terrier 14
... ...
welsh_springer_spaniel 14
west_highland_white_terrier 12
whippet 22
wire-haired_fox_terrier 22
yorkshire_terrier 17
[120 rows x 1 columns]
Fold 5 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 18
afghan_hound 22
african_hunting_dog 21
airedale 20
american_staffordshire_terrier 20
... ...
welsh_springer_spaniel 10
west_highland_white_terrier 20
whippet 20
wire-haired_fox_terrier 13
yorkshire_terrier 12
[120 rows x 1 columns]
BATCH_SIZE = 20  # samples per generator batch for all training runs below
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
# Directory where per-fold best checkpoints are written.
model_save_dir = join(data_dir, 'saved_models')

def get_callbacks(saved_model_name):
    """Return the standard per-fold callbacks.

    Early stopping watches validation loss with patience 8; checkpointing
    keeps only the weights with the lowest validation loss seen so far,
    saved as model_<saved_model_name>.h5 under model_save_dir.
    """
    stopper = EarlyStopping(monitor="val_loss", patience=8, verbose=2)
    saver = ModelCheckpoint(
        join(model_save_dir, f'model_{saved_model_name}.h5'),
        monitor='val_loss',
        verbose=1,
        save_best_only=True,
        mode='min',
    )
    return [stopper, saver]
def train_simple_cnn_model(folds, optimizer, image_generator, epochs, dims):
    """Train one fresh baseline CNN per fold and return the list of fit histories.

    For each fold a new model is created, fed by flow_from_dataframe
    generators reading images from train_dir at `dims`, and trained with the
    shared early-stopping/checkpoint callbacks.
    """
    histories = []
    for fold_no, fold in enumerate(folds):
        print(f'\n\nFitting model with fold no. {fold_no + 1}')

        def flow(frame):
            # Stream images straight from disk, resized to `dims`, labels
            # one-hot encoded from the 'breed' column.
            return image_generator.flow_from_dataframe(
                dataframe=frame, target_size=dims, directory=train_dir,
                x_col='id', y_col='breed', class_mode="categorical",
                shuffle=True, batch_size=BATCH_SIZE)

        model = create_model(optimizer)
        run = model.fit(
            x=flow(fold['train']),
            validation_data=flow(fold['valid']),
            callbacks=get_callbacks(saved_model_name=f'cnn_fold{fold_no}'),
            epochs=epochs,
            verbose=2)
        histories.append(run)
    return histories
# Train the baseline CNN on all 5 folds, no augmentation, 128x128 inputs.
cnn_history = train_simple_cnn_model(folds=folds, optimizer='adam', image_generator=ImageDataGenerator(), epochs=20, dims=images_shape)
Fitting model with fold no. 1 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.00929, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold0.h5 409/409 - 27s - loss: 5.1871 - accuracy: 0.0102 - val_loss: 4.7860 - val_accuracy: 0.0093 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.00929 to 0.01125, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold0.h5 409/409 - 21s - loss: 4.7837 - accuracy: 0.0099 - val_loss: 4.7852 - val_accuracy: 0.0112 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.01125 409/409 - 22s - loss: 4.7811 - accuracy: 0.0126 - val_loss: 4.7849 - val_accuracy: 0.0112 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.01125 409/409 - 21s - loss: 4.7794 - accuracy: 0.0126 - val_loss: 4.7848 - val_accuracy: 0.0112 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.01125 409/409 - 17s - loss: 4.7782 - accuracy: 0.0126 - val_loss: 4.7850 - val_accuracy: 0.0112 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.01125 409/409 - 20s - loss: 4.7774 - accuracy: 0.0126 - val_loss: 4.7852 - val_accuracy: 0.0112 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.01125 409/409 - 21s - loss: 4.7767 - accuracy: 0.0126 - val_loss: 4.7855 - val_accuracy: 0.0112 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.01125 409/409 - 22s - loss: 4.7763 - accuracy: 0.0126 - val_loss: 4.7859 - val_accuracy: 0.0112 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.01125 409/409 - 22s - loss: 4.7759 - accuracy: 0.0126 - val_loss: 4.7862 - val_accuracy: 0.0112 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.01125 409/409 - 21s - loss: 4.7757 - accuracy: 0.0126 - val_loss: 4.7865 - val_accuracy: 
0.0112 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.01125 409/409 - 22s - loss: 4.7756 - accuracy: 0.0126 - val_loss: 4.7868 - val_accuracy: 0.0112 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.01125 409/409 - 22s - loss: 4.7755 - accuracy: 0.0126 - val_loss: 4.7871 - val_accuracy: 0.0112 Epoch 00012: early stopping Fitting model with fold no. 2 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.01076, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold1.h5 409/409 - 21s - loss: 5.7777 - accuracy: 0.0098 - val_loss: 4.7861 - val_accuracy: 0.0108 Epoch 2/20 Epoch 00002: val_accuracy did not improve from 0.01076 409/409 - 24s - loss: 4.7834 - accuracy: 0.0121 - val_loss: 4.7854 - val_accuracy: 0.0108 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.01076 409/409 - 19s - loss: 4.7811 - accuracy: 0.0127 - val_loss: 4.7851 - val_accuracy: 0.0108 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.01076 409/409 - 21s - loss: 4.7794 - accuracy: 0.0127 - val_loss: 4.7851 - val_accuracy: 0.0108 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.01076 409/409 - 20s - loss: 4.7782 - accuracy: 0.0127 - val_loss: 4.7855 - val_accuracy: 0.0108 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.01076 409/409 - 18s - loss: 4.7773 - accuracy: 0.0127 - val_loss: 4.7857 - val_accuracy: 0.0108 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.01076 409/409 - 19s - loss: 4.7767 - accuracy: 0.0127 - val_loss: 4.7862 - val_accuracy: 0.0108 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.01076 409/409 - 22s - loss: 4.7762 - accuracy: 0.0127 - val_loss: 4.7865 - val_accuracy: 0.0108 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.01076 409/409 - 22s - loss: 4.7759 - 
accuracy: 0.0127 - val_loss: 4.7868 - val_accuracy: 0.0108 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.01076 409/409 - 22s - loss: 4.7757 - accuracy: 0.0127 - val_loss: 4.7873 - val_accuracy: 0.0108 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.01076 409/409 - 22s - loss: 4.7755 - accuracy: 0.0127 - val_loss: 4.7875 - val_accuracy: 0.0108 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.01076 409/409 - 22s - loss: 4.7754 - accuracy: 0.0127 - val_loss: 4.7878 - val_accuracy: 0.0108 Epoch 00012: early stopping Fitting model with fold no. 3 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.00978, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold2.h5 409/409 - 22s - loss: 5.1348 - accuracy: 0.0103 - val_loss: 4.7852 - val_accuracy: 0.0098 Epoch 2/20 Epoch 00002: val_accuracy did not improve from 0.00978 409/409 - 23s - loss: 4.7847 - accuracy: 0.0116 - val_loss: 4.7839 - val_accuracy: 0.0088 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.00978 409/409 - 24s - loss: 4.7821 - accuracy: 0.0116 - val_loss: 4.7831 - val_accuracy: 0.0088 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7818 - accuracy: 0.0120 - val_loss: 4.7827 - val_accuracy: 0.0088 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7797 - accuracy: 0.0112 - val_loss: 4.7823 - val_accuracy: 0.0088 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.00978 409/409 - 23s - loss: 4.7790 - accuracy: 0.0103 - val_loss: 4.7822 - val_accuracy: 0.0088 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.00978 409/409 - 24s - loss: 4.7785 - accuracy: 0.0104 - val_loss: 4.7822 - val_accuracy: 0.0088 Epoch 8/20 Epoch 00008: val_accuracy did not 
improve from 0.00978 409/409 - 24s - loss: 4.7781 - accuracy: 0.0121 - val_loss: 4.7822 - val_accuracy: 0.0088 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7778 - accuracy: 0.0121 - val_loss: 4.7824 - val_accuracy: 0.0088 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.00978 409/409 - 21s - loss: 4.7776 - accuracy: 0.0121 - val_loss: 4.7824 - val_accuracy: 0.0088 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.00978 409/409 - 21s - loss: 4.7775 - accuracy: 0.0117 - val_loss: 4.7824 - val_accuracy: 0.0088 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7774 - accuracy: 0.0108 - val_loss: 4.7826 - val_accuracy: 0.0088 Epoch 13/20 Epoch 00013: val_accuracy did not improve from 0.00978 409/409 - 23s - loss: 4.7773 - accuracy: 0.0112 - val_loss: 4.7827 - val_accuracy: 0.0088 Epoch 14/20 Epoch 00014: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7773 - accuracy: 0.0121 - val_loss: 4.7828 - val_accuracy: 0.0088 Epoch 15/20 Epoch 00015: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7772 - accuracy: 0.0106 - val_loss: 4.7829 - val_accuracy: 0.0088 Epoch 16/20 Epoch 00016: val_accuracy did not improve from 0.00978 409/409 - 22s - loss: 4.7772 - accuracy: 0.0119 - val_loss: 4.7829 - val_accuracy: 0.0088 Epoch 00016: early stopping Fitting model with fold no. 4 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. 
Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.01174, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold3.h5 409/409 - 23s - loss: 6.3416 - accuracy: 0.0101 - val_loss: 4.7860 - val_accuracy: 0.0117 Epoch 2/20 Epoch 00002: val_accuracy did not improve from 0.01174 409/409 - 22s - loss: 4.7839 - accuracy: 0.0116 - val_loss: 4.7851 - val_accuracy: 0.0117 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.01174 409/409 - 21s - loss: 4.7817 - accuracy: 0.0125 - val_loss: 4.7845 - val_accuracy: 0.0117 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.01174 409/409 - 30s - loss: 4.7801 - accuracy: 0.0125 - val_loss: 4.7843 - val_accuracy: 0.0117 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.01174 409/409 - 24s - loss: 4.7790 - accuracy: 0.0125 - val_loss: 4.7844 - val_accuracy: 0.0117 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.01174 409/409 - 23s - loss: 4.7781 - accuracy: 0.0125 - val_loss: 4.7845 - val_accuracy: 0.0117 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.01174 409/409 - 24s - loss: 4.7775 - accuracy: 0.0125 - val_loss: 4.7847 - val_accuracy: 0.0117 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.01174 409/409 - 23s - loss: 4.7771 - accuracy: 0.0125 - val_loss: 4.7850 - val_accuracy: 0.0117 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.01174 409/409 - 22s - loss: 4.7768 - accuracy: 0.0125 - val_loss: 4.7852 - val_accuracy: 0.0117 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.01174 409/409 - 22s - loss: 4.7766 - accuracy: 0.0125 - val_loss: 4.7855 - val_accuracy: 0.0117 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.01174 409/409 - 24s - loss: 4.7764 - accuracy: 0.0125 - val_loss: 4.7857 - val_accuracy: 0.0117 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.01174 409/409 - 21s - loss: 4.7763 - accuracy: 0.0125 - val_loss: 4.7859 - 
val_accuracy: 0.0117 Epoch 00012: early stopping Fitting model with fold no. 5 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.01174, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_cnn_fold4.h5 409/409 - 21s - loss: 5.8805 - accuracy: 0.0103 - val_loss: 4.7865 - val_accuracy: 0.0117 Epoch 2/20 Epoch 00002: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7832 - accuracy: 0.0125 - val_loss: 4.7861 - val_accuracy: 0.0117 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7807 - accuracy: 0.0120 - val_loss: 4.7861 - val_accuracy: 0.0117 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7788 - accuracy: 0.0125 - val_loss: 4.7865 - val_accuracy: 0.0117 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.01174 409/409 - 21s - loss: 4.7776 - accuracy: 0.0117 - val_loss: 4.7870 - val_accuracy: 0.0117 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7766 - accuracy: 0.0125 - val_loss: 4.7876 - val_accuracy: 0.0117 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7759 - accuracy: 0.0125 - val_loss: 4.7882 - val_accuracy: 0.0117 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.01174 409/409 - 20s - loss: 4.7754 - accuracy: 0.0125 - val_loss: 4.7888 - val_accuracy: 0.0117 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.01174 409/409 - 24s - loss: 4.7750 - accuracy: 0.0125 - val_loss: 4.7894 - val_accuracy: 0.0117 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.01174 409/409 - 24s - loss: 4.7748 - accuracy: 0.0125 - val_loss: 4.7898 - val_accuracy: 0.0117 Epoch 00010: early stopping
def plot_model_fold_stats(k, history):
    """Plot per-fold accuracy (left column) and loss (right column) curves.

    Args:
        k: number of folds (one subplot row per fold).
        history: list of Keras History objects, one per fold.

    Fixes: removed the leftover debug print(ax.shape) that leaked "(5, 2)"
    into the output; squeeze=False keeps ax two-dimensional so the
    ax[i][j] indexing also works when k == 1.
    """
    fig, ax = plt.subplots(k, 2, figsize=(15, 20), squeeze=False)
    for i, h in enumerate(history):
        # Training & validation accuracy for this fold.
        ax[i][0].plot(h.history['accuracy'])
        ax[i][0].plot(h.history['val_accuracy'])
        ax[i][0].set_title(f'Model accuracy fold no. {i + 1}')
        ax[i][0].set_ylabel('Accuracy')
        ax[i][0].set_xlabel('Epoch')
        ax[i][0].legend(['Train', 'Valid'], loc='upper left')
        # Training & validation loss for the same fold.
        ax[i][1].plot(h.history['loss'])
        ax[i][1].plot(h.history['val_loss'])
        ax[i][1].set_title(f'Model loss fold no. {i + 1}')
        ax[i][1].set_ylabel('Loss')
        ax[i][1].set_xlabel('Epoch')
        ax[i][1].legend(['Train', 'Valid'], loc='upper left')
    fig.tight_layout(pad=3.0)
    plt.show()
# Visualize the baseline CNN's learning curves across all folds.
plot_model_fold_stats(k=k, history=cnn_history)
(5, 2)
import statistics
def print_model_last_epoch_stats(history):
    """Print the across-fold mean of the final-epoch metrics.

    Args:
        history: list of Keras History objects (one per fold), each exposing
                 a .history dict of per-epoch metric lists.

    Each fold contributes exactly its last recorded epoch value per metric,
    so the original slice-then-flatten dance ([-1:] slices folded through a
    nested comprehension) is replaced by a direct [-1] lookup.
    """
    def final(metric):
        # Final-epoch value of `metric` from each fold.
        return [h.history[metric][-1] for h in history]

    print(f"Mean fold accuracy: {round(statistics.mean(final('accuracy')) * 100, 2)}%")
    print(f"Mean fold loss: {statistics.mean(final('loss'))}")
    print(f"Mean fold validation accuracy: {round(statistics.mean(final('val_accuracy')) * 100, 2)}%")
    print(f"Mean fold validation loss: {statistics.mean(final('val_loss'))}")
# Report cross-fold means of the final-epoch metrics for the baseline CNN.
print_model_last_epoch_stats(history=cnn_history)
The model did not manage to fit the data, and the validation results are almost entirely random. Let's suggest a few ways that could possibly improve this.
def create_model_2(optimizer):
    """Build and compile the improved CNN classifier.

    Four convolutional stages (16/32/48/64 filters, each followed by
    batch normalization and 2x2 max pooling) feed a stack of dense
    layers ending in a 120-way softmax.

    Args:
        optimizer: optimizer instance or name forwarded to `model.compile`.

    Returns:
        A compiled Keras Model expecting (128, 128, 3) inputs.
    """
    def conv_block(tensor, n_filters):
        # One convolution -> batch-norm -> 2x2 max-pool stage.
        tensor = Conv2D(n_filters, (3, 3), activation='relu')(tensor)
        tensor = BatchNormalization(axis=3)(tensor)
        return MaxPool2D(pool_size=(2, 2), strides=2)(tensor)

    inp = Input((128, 128, 3))
    x = inp
    for n_filters in (16, 32, 48, 64):
        x = conv_block(x, n_filters)

    # Fully connected head.
    x = Flatten()(x)
    for units in (1024, 512, 256):
        x = Dense(units, activation='relu')(x)
    x = Dense(120, activation='softmax')(x)

    model = Model(inputs=inp, outputs=x)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model
# Sanity-check the improved architecture by printing its layer summary.
create_model_2('adam').summary()
Model: "functional_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_1 (InputLayer) [(None, 128, 128, 3)] 0 _________________________________________________________________ conv2d (Conv2D) (None, 126, 126, 16) 448 _________________________________________________________________ batch_normalization (BatchNo (None, 126, 126, 16) 64 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 63, 63, 16) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 61, 61, 32) 4640 _________________________________________________________________ batch_normalization_1 (Batch (None, 61, 61, 32) 128 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 30, 30, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 28, 28, 48) 13872 _________________________________________________________________ batch_normalization_2 (Batch (None, 28, 28, 48) 192 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 14, 14, 48) 0 _________________________________________________________________ conv2d_3 (Conv2D) (None, 12, 12, 64) 27712 _________________________________________________________________ batch_normalization_3 (Batch (None, 12, 12, 64) 256 _________________________________________________________________ max_pooling2d_3 (MaxPooling2 (None, 6, 6, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 2304) 0 _________________________________________________________________ dense (Dense) (None, 1024) 2360320 _________________________________________________________________ dense_1 (Dense) (None, 512) 524800 _________________________________________________________________ dense_2 (Dense) (None, 256) 
131328 _________________________________________________________________ dense_3 (Dense) (None, 120) 30840 ================================================================= Total params: 3,094,600 Trainable params: 3,094,280 Non-trainable params: 320 _________________________________________________________________
# Rebuild the k train/validation folds (generate_kfold is defined earlier in the notebook).
train_folds = generate_kfold(np.zeros(train_count), Y_train)
Fold 1 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 19
afghan_hound 21
african_hunting_dog 13
airedale 23
american_staffordshire_terrier 13
... ...
welsh_springer_spaniel 22
west_highland_white_terrier 17
whippet 22
wire-haired_fox_terrier 10
yorkshire_terrier 20
[120 rows x 1 columns]
Fold 2 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 15
afghan_hound 19
african_hunting_dog 19
airedale 23
american_staffordshire_terrier 11
... ...
welsh_springer_spaniel 17
west_highland_white_terrier 12
whippet 17
wire-haired_fox_terrier 14
yorkshire_terrier 13
[120 rows x 1 columns]
Fold 3 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 16
afghan_hound 32
african_hunting_dog 17
airedale 18
american_staffordshire_terrier 16
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 20
whippet 14
wire-haired_fox_terrier 23
yorkshire_terrier 20
[120 rows x 1 columns]
Fold 4 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 12
afghan_hound 22
african_hunting_dog 16
airedale 23
american_staffordshire_terrier 14
... ...
welsh_springer_spaniel 14
west_highland_white_terrier 12
whippet 22
wire-haired_fox_terrier 22
yorkshire_terrier 17
[120 rows x 1 columns]
Fold 5 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 18
afghan_hound 22
african_hunting_dog 21
airedale 20
american_staffordshire_terrier 20
... ...
welsh_springer_spaniel 10
west_highland_white_terrier 20
whippet 20
wire-haired_fox_terrier 13
yorkshire_terrier 12
[120 rows x 1 columns]
def train_simple_cnn_model2(folds, optimizer, image_generator, epochs, dims):
    """Train one model per fold and collect the Keras History objects.

    Args:
        folds: list of {'train': DataFrame, 'valid': DataFrame} dicts.
        optimizer: optimizer forwarded to `create_model_2`.
        image_generator: ImageDataGenerator used for both splits.
        epochs: maximum number of epochs per fold.
        dims: target (height, width) for the generated images.

    Returns:
        List of History objects, one per fold.
    """
    histories = []
    # Both splits use identical generator settings; only the dataframe differs.
    shared_kwargs = dict(target_size=dims, directory=train_dir, x_col='id',
                         y_col='breed', class_mode="categorical",
                         shuffle=True, batch_size=BATCH_SIZE)
    for fold_no, fold in enumerate(folds):
        print(f'\n\nFitting model with fold no. {fold_no + 1}')
        train_gen = image_generator.flow_from_dataframe(dataframe=fold['train'], **shared_kwargs)
        valid_gen = image_generator.flow_from_dataframe(dataframe=fold['valid'], **shared_kwargs)
        model = create_model_2(optimizer)
        histories.append(
            model.fit(x=train_gen, validation_data=valid_gen,
                      callbacks=get_callbacks(saved_model_name=f'simple_cnn_fold{fold_no}'),
                      epochs=epochs, verbose=2))
    return histories
# Train the improved CNN on every fold, with raw (unaugmented) images.
simple_cnn_history = train_simple_cnn_model2(folds=train_folds, optimizer='adam', image_generator=ImageDataGenerator(), epochs=20, dims=images_shape)
Fitting model with fold no. 1 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.01711, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 21s - loss: 4.7070 - accuracy: 0.0241 - val_loss: 5.2388 - val_accuracy: 0.0171 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.01711 to 0.03961, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 22s - loss: 4.4005 - accuracy: 0.0472 - val_loss: 4.4872 - val_accuracy: 0.0396 Epoch 3/20 Epoch 00003: val_accuracy improved from 0.03961 to 0.05379, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 19s - loss: 4.1758 - accuracy: 0.0679 - val_loss: 4.3762 - val_accuracy: 0.0538 Epoch 4/20 Epoch 00004: val_accuracy did not improve from 0.05379 409/409 - 20s - loss: 3.9178 - accuracy: 0.1019 - val_loss: 4.2968 - val_accuracy: 0.0533 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.05379 409/409 - 22s - loss: 3.5707 - accuracy: 0.1542 - val_loss: 4.8098 - val_accuracy: 0.0513 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.05379 409/409 - 20s - loss: 3.0713 - accuracy: 0.2360 - val_loss: 5.2907 - val_accuracy: 0.0523 Epoch 7/20 Epoch 00007: val_accuracy improved from 0.05379 to 0.06357, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 21s - loss: 2.3975 - accuracy: 0.3653 - val_loss: 5.1638 - val_accuracy: 0.0636 Epoch 8/20 Epoch 00008: val_accuracy improved from 0.06357 to 0.07531, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 21s - loss: 1.5909 - accuracy: 0.5568 - val_loss: 5.6397 - val_accuracy: 0.0753 Epoch 9/20 Epoch 00009: val_accuracy improved from 0.07531 to 0.07628, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold0.h5 409/409 - 21s - loss: 0.8962 - accuracy: 0.7440 - val_loss: 6.6895 - val_accuracy: 0.0763 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.07628 409/409 - 21s - loss: 0.5397 - accuracy: 0.8413 - val_loss: 7.5689 - val_accuracy: 0.0680 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.07628 409/409 - 22s - loss: 0.3384 - accuracy: 0.9019 - val_loss: 8.1949 - val_accuracy: 0.0601 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.07628 409/409 - 22s - loss: 0.2952 - accuracy: 0.9145 - val_loss: 8.6949 - val_accuracy: 0.0577 Epoch 00012: early stopping Fitting model with fold no. 2 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. 
Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.01858, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold1.h5 409/409 - 23s - loss: 4.7121 - accuracy: 0.0231 - val_loss: 4.6683 - val_accuracy: 0.0186 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.01858 to 0.04597, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold1.h5 409/409 - 22s - loss: 4.4117 - accuracy: 0.0424 - val_loss: 4.4195 - val_accuracy: 0.0460 Epoch 3/20 Epoch 00003: val_accuracy did not improve from 0.04597 409/409 - 20s - loss: 4.1921 - accuracy: 0.0655 - val_loss: 4.5684 - val_accuracy: 0.0347 Epoch 4/20 Epoch 00004: val_accuracy improved from 0.04597 to 0.05770, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold1.h5 409/409 - 20s - loss: 3.9532 - accuracy: 0.0890 - val_loss: 4.4178 - val_accuracy: 0.0577 Epoch 5/20 Epoch 00005: val_accuracy improved from 0.05770 to 0.07042, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold1.h5 409/409 - 21s - loss: 3.6297 - accuracy: 0.1300 - val_loss: 4.3246 - val_accuracy: 0.0704 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.07042 409/409 - 20s - loss: 3.1528 - accuracy: 0.2111 - val_loss: 4.7491 - val_accuracy: 0.0611 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.07042 409/409 - 20s - loss: 2.4785 - accuracy: 0.3435 - val_loss: 5.1234 - val_accuracy: 0.0685 Epoch 8/20 Epoch 00008: val_accuracy improved from 0.07042 to 0.07482, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold1.h5 409/409 - 20s - loss: 1.6851 - accuracy: 0.5286 - val_loss: 6.0705 - 
val_accuracy: 0.0748 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.07482 409/409 - 20s - loss: 0.9889 - accuracy: 0.7169 - val_loss: 6.6320 - val_accuracy: 0.0660 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.07482 409/409 - 20s - loss: 0.5417 - accuracy: 0.8435 - val_loss: 7.5346 - val_accuracy: 0.0680 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.07482 409/409 - 20s - loss: 0.3167 - accuracy: 0.9053 - val_loss: 8.7848 - val_accuracy: 0.0738 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.07482 409/409 - 20s - loss: 0.3003 - accuracy: 0.9097 - val_loss: 8.8966 - val_accuracy: 0.0694 Epoch 13/20 Epoch 00013: val_accuracy did not improve from 0.07482 409/409 - 20s - loss: 0.2888 - accuracy: 0.9145 - val_loss: 9.2961 - val_accuracy: 0.0694 Epoch 00013: early stopping Fitting model with fold no. 3 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.02838, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 21s - loss: 4.7187 - accuracy: 0.0210 - val_loss: 4.6014 - val_accuracy: 0.0284 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.02838 to 0.03669, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 21s - loss: 4.4174 - accuracy: 0.0415 - val_loss: 4.5062 - val_accuracy: 0.0367 Epoch 3/20 Epoch 00003: val_accuracy improved from 0.03669 to 0.04795, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 21s - loss: 4.2133 - accuracy: 0.0581 - val_loss: 4.4810 - val_accuracy: 0.0479 Epoch 4/20 Epoch 00004: val_accuracy improved from 0.04795 to 0.04843, saving model to 
C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 20s - loss: 3.9954 - accuracy: 0.0843 - val_loss: 4.4877 - val_accuracy: 0.0484 Epoch 5/20 Epoch 00005: val_accuracy improved from 0.04843 to 0.07436, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 20s - loss: 3.6906 - accuracy: 0.1301 - val_loss: 4.3415 - val_accuracy: 0.0744 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.07436 409/409 - 20s - loss: 3.2181 - accuracy: 0.2071 - val_loss: 4.4620 - val_accuracy: 0.0587 Epoch 7/20 Epoch 00007: val_accuracy improved from 0.07436 to 0.09149, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold2.h5 409/409 - 21s - loss: 2.5619 - accuracy: 0.3322 - val_loss: 4.7313 - val_accuracy: 0.0915 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.09149 409/409 - 20s - loss: 1.7678 - accuracy: 0.5086 - val_loss: 5.3195 - val_accuracy: 0.0841 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.09149 409/409 - 21s - loss: 1.0246 - accuracy: 0.7046 - val_loss: 6.1840 - val_accuracy: 0.0685 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.09149 409/409 - 20s - loss: 0.5847 - accuracy: 0.8288 - val_loss: 7.4508 - val_accuracy: 0.0822 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.09149 409/409 - 20s - loss: 0.3913 - accuracy: 0.8825 - val_loss: 7.9414 - val_accuracy: 0.0788 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.09149 409/409 - 21s - loss: 0.2957 - accuracy: 0.9121 - val_loss: 8.1895 - val_accuracy: 0.0778 Epoch 13/20 Epoch 00013: val_accuracy did not improve from 0.09149 409/409 - 22s - loss: 0.2594 - accuracy: 0.9233 - val_loss: 9.2830 - val_accuracy: 0.0768 Epoch 00013: early stopping Fitting model with fold no. 
4 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.02789, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 23s - loss: 4.7344 - accuracy: 0.0205 - val_loss: 4.8426 - val_accuracy: 0.0279 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.02789 to 0.03327, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 23s - loss: 4.4390 - accuracy: 0.0433 - val_loss: 4.5019 - val_accuracy: 0.0333 Epoch 3/20 Epoch 00003: val_accuracy improved from 0.03327 to 0.04892, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 22s - loss: 4.1948 - accuracy: 0.0644 - val_loss: 4.5055 - val_accuracy: 0.0489 Epoch 4/20 Epoch 00004: val_accuracy improved from 0.04892 to 0.05969, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 23s - loss: 3.9801 - accuracy: 0.0854 - val_loss: 4.5600 - val_accuracy: 0.0597 Epoch 5/20 Epoch 00005: val_accuracy improved from 0.05969 to 0.08072, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 23s - loss: 3.6869 - accuracy: 0.1275 - val_loss: 4.1983 - val_accuracy: 0.0807 Epoch 6/20 Epoch 00006: val_accuracy did not improve from 0.08072 409/409 - 23s - loss: 3.2747 - accuracy: 0.1893 - val_loss: 4.7089 - val_accuracy: 0.0607 Epoch 7/20 Epoch 00007: val_accuracy improved from 0.08072 to 0.08268, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 22s - loss: 2.6822 - accuracy: 0.3024 - val_loss: 4.5725 - val_accuracy: 0.0827 Epoch 8/20 Epoch 00008: val_accuracy improved from 0.08268 to 0.09785, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold3.h5 409/409 - 22s - loss: 1.9299 - accuracy: 0.4685 - val_loss: 5.4570 - val_accuracy: 0.0978 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.09785 409/409 - 22s - loss: 1.2110 - accuracy: 0.6503 - val_loss: 6.2138 - val_accuracy: 0.0832 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.09785 409/409 - 23s - loss: 0.6908 - accuracy: 0.7989 - val_loss: 6.5875 - val_accuracy: 0.0871 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.09785 409/409 - 22s - loss: 0.4146 - accuracy: 0.8774 - val_loss: 7.6676 - val_accuracy: 0.0876 Epoch 12/20 Epoch 00012: val_accuracy did not improve from 0.09785 409/409 - 23s - loss: 0.2936 - accuracy: 0.9127 - val_loss: 8.3077 - val_accuracy: 0.0739 Epoch 13/20 Epoch 00013: val_accuracy did not improve from 0.09785 409/409 - 22s - loss: 0.2720 - accuracy: 0.9213 - val_loss: 9.4473 - val_accuracy: 0.0705 Epoch 00013: early stopping Fitting model with fold no. 5 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. 
Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.02104, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold4.h5 409/409 - 22s - loss: 4.7286 - accuracy: 0.0225 - val_loss: 4.9363 - val_accuracy: 0.0210 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.02104 to 0.03033, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold4.h5 409/409 - 23s - loss: 4.4589 - accuracy: 0.0372 - val_loss: 4.5216 - val_accuracy: 0.0303 Epoch 3/20 Epoch 00003: val_accuracy improved from 0.03033 to 0.04110, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold4.h5 409/409 - 23s - loss: 4.2354 - accuracy: 0.0576 - val_loss: 4.4184 - val_accuracy: 0.0411 Epoch 4/20 Epoch 00004: val_accuracy improved from 0.04110 to 0.06067, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold4.h5 409/409 - 23s - loss: 4.0011 - accuracy: 0.0817 - val_loss: 4.4478 - val_accuracy: 0.0607 Epoch 5/20 Epoch 00005: val_accuracy did not improve from 0.06067 409/409 - 21s - loss: 3.7085 - accuracy: 0.1263 - val_loss: 4.7899 - val_accuracy: 0.0440 Epoch 6/20 Epoch 00006: val_accuracy improved from 0.06067 to 0.07926, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_simple_cnn_fold4.h5 409/409 - 21s - loss: 3.2799 - accuracy: 0.1920 - val_loss: 4.5028 - val_accuracy: 0.0793 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.07926 409/409 - 20s - loss: 2.6792 - accuracy: 0.2995 - val_loss: 4.7880 - val_accuracy: 0.0763 Epoch 8/20 Epoch 00008: val_accuracy did not improve from 0.07926 409/409 - 20s - loss: 1.9272 - accuracy: 0.4759 - val_loss: 5.4335 - 
val_accuracy: 0.0768 Epoch 9/20 Epoch 00009: val_accuracy did not improve from 0.07926 409/409 - 20s - loss: 1.1700 - accuracy: 0.6601 - val_loss: 6.3105 - val_accuracy: 0.0749 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.07926 409/409 - 22s - loss: 0.6804 - accuracy: 0.7942 - val_loss: 7.1335 - val_accuracy: 0.0612 Epoch 11/20 Epoch 00011: val_accuracy did not improve from 0.07926 409/409 - 19s - loss: 0.4497 - accuracy: 0.8630 - val_loss: 7.7390 - val_accuracy: 0.0690 Epoch 00011: early stopping
# Visualize the improved CNN's per-fold training curves.
plot_model_fold_stats(k=k, history=simple_cnn_history)
(5, 2)
# Summarize the improved CNN's final-epoch metrics averaged over folds.
print_model_last_epoch_stats(history=simple_cnn_history)
Mean fold accuracy: 90.73% Mean fold loss: 31.3% Mean fold validation accuracy: 6.87% Mean fold validation loss: 889.21%
We plot a confusion matrix to get a visual idea of how our second model classified the labeled data. Since we do not have labeled test data in this dataset, we will use one of the models trained in a fold and treat its labeled validation data as the true labels.
import itertools
def plot_confusion_matrix(cm, labels, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
    """Render a confusion matrix as a heatmap with per-cell annotations.

    Args:
        cm: square confusion matrix (n_classes x n_classes) of counts.
        labels: class names for the axis ticks, same order as `cm`.
        normalize: if True, show row-normalized rates instead of raw counts.
        title: figure title.
        cmap: matplotlib colormap for the heatmap.
    """
    # Bug fix: normalization used to happen AFTER imshow(), so the heatmap
    # showed raw counts while the cell text showed normalized rates.
    # Normalize first so the colors, threshold, and text all agree.
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    plt.figure(figsize=(28, 28))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    plt.grid(None)
    tick_marks = np.arange(len(labels))
    plt.xticks(tick_marks, labels, rotation=90)
    plt.yticks(tick_marks, labels)
    # White text on dark cells, black on light ones.
    thresh = cm.max() / 2.
    fmt = '.2f' if normalize else 'd'
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()
from sklearn.metrics import confusion_matrix
from collections import Counter
from functools import cmp_to_key
def calculate_model_metrics(saved_model_prefix, create_model_func, folds, dims, preprocess_func=lambda s: s, dtype=np.uint8, fold_i=None):
    """Score a saved fold checkpoint on that fold's validation split.

    Args:
        saved_model_prefix: weight-file prefix; loads "model_{prefix}{fold}.h5"
            from model_save_dir.
        create_model_func: zero-arg callable that rebuilds the architecture
            matching the saved weights.
        folds: list of {'train', 'valid'} DataFrame dicts.
        dims: square side length each image is resized to.
        preprocess_func: optional per-image transform (defaults to identity).
        dtype: dtype of the batched input array.
        fold_i: fold index to evaluate; a random fold is chosen when None.

    Returns:
        Dict with 'confusion' (confusion matrix), 'correct' / 'wrong'
        (top-10 most confident correct / incorrect predictions, each a dict
        of pred_label/true_label/filename/perc), and 'correct_perc'
        (fraction of validation images classified correctly).
    """
    # Pick the fold whose validation split serves as our labeled test set.
    fold_idx = random.choice(range(len(folds))) if fold_i is None else fold_i
    fold = folds[fold_idx]['valid']
    model = create_model_func()
    model.load_weights(join(model_save_dir, f"model_{saved_model_prefix}{fold_idx}.h5"))
    # Load every validation image resized to (dims, dims).
    # NOTE(review): cv2.imread yields BGR channel order — presumably training
    # used the same pipeline, so this is consistent; verify against trainer.
    X = np.zeros((fold.shape[0], dims, dims, 3), dtype=dtype)
    for i in range(fold.shape[0]):
        X[i] = preprocess_func(cv2.resize(cv2.imread(join(train_dir, f"{fold['id'].iloc[i]}")), (dims, dims)))
    y_pred = model.predict(X)
    # one_hot_encode and index_to_label are notebook helpers defined earlier.
    y_enc = one_hot_encode(fold['breed'].to_numpy())
    y_pred_classes = np.argmax(y_pred,axis = 1)
    y_true = np.argmax(y_enc, axis=1)
    # Pair each prediction with its true label and the model's confidence.
    y_pred_labeled = [{'pred_label': index_to_label[pred], 'true_label': index_to_label[y_true[i]],
    'filename': fold['id'].iloc[i], 'perc': np.amax(y_pred[i])} for i, pred in enumerate(y_pred_classes)]
    correct_pred = list(filter(lambda pred: pred['pred_label'] == pred['true_label'], y_pred_labeled))
    # The ten misclassifications the model was MOST confident about.
    worst_wrong_pred = sorted(list(filter(lambda pred: pred['pred_label'] != pred['true_label'], y_pred_labeled)), key=lambda e: e['perc'], reverse=True)[:10]
    correct_perc = len(correct_pred) / fold.shape[0]
    return {'confusion':confusion_matrix(y_true, y_pred_classes), 'correct': sorted(correct_pred, key=lambda e: e['perc'], reverse=True)[:10],
    'wrong': worst_wrong_pred, 'correct_perc': correct_perc}
def read_and_plot_correct_wrong(correct, wrong):
    """Plot the top correct predictions and the worst wrong ones side by side.

    The first figure shows the 10 most confident correct predictions with
    their labels. The second shows the 10 most confident WRONG predictions
    (top row: the misclassified image; bottom row: a random training sample
    of the breed the model predicted, for visual comparison).

    Args:
        correct: list of prediction dicts (pred_label/true_label/filename/perc)
            the model got right, as produced by calculate_model_metrics.
        wrong: same structure, for the model's mistakes.
    """
    num_imgs = len(correct)
    def read_imgs(lst, is_samples=False):
        # `lst` is either a list of prediction dicts (take 'filename') or,
        # when is_samples=True, a list of single-row pandas samples.
        filter_lst = list(e['filename'] if not is_samples else e for e in lst)
        X = np.zeros((num_imgs,224,224,3))
        for i, img in enumerate(filter_lst):
            # read_and_resize_single_img is a notebook helper defined earlier.
            # NOTE(review): the trailing `4` argument's meaning is not visible
            # here — presumably a resize/interpolation mode; confirm at its def.
            arr = read_and_resize_single_img(join(train_dir, img.iloc[0] if is_samples else img),224,224,4)
            X[i] = np.float16(arr)
        return X
    correct_imgs = read_imgs(correct)
    wrong_imgs = read_imgs(wrong)
    # NOTE(review): despite the name, this collects the PREDICTED labels of
    # the wrong predictions — used below to fetch example images of what the
    # model thought it saw.
    wrongs_true_breeds = list(wrong[i]['pred_label'] for i in range(num_imgs))
    wrong_true_sample_filenames = [labeled_data.loc[labeled_data['breed'] == breed].sample()['id'] for breed in wrongs_true_breeds]
    wrong_true_samples = read_imgs(wrong_true_sample_filenames, is_samples=True)
    # Figure 1: the most confident correct predictions.
    fig = plt.figure(1, figsize=(35, 5))
    fig.suptitle('Correct Predictions - TOP 10 HIGHEST %', fontsize=20)
    grid = ImageGrid(fig, 111, nrows_ncols=(1, num_imgs), axes_pad=0.05)
    for i in range(num_imgs):
        ax = grid[i]
        ax.imshow(correct_imgs[i] / 255.)
        ax.axis('off')
        ax.text(0, 0, 'True label', color='white', backgroundcolor='green', alpha=0.8, fontsize=13)
        ax.text(0, 20, '%s' % correct[i]['true_label'], color='k', backgroundcolor='w', alpha=0.8, fontsize=13)
        ax.text(0, 224, 'Predicted label', color='white', backgroundcolor='green', alpha=0.8, fontsize=13)
        ax.text(0, 244, '%s - %s' % (correct[i]['pred_label'], round((correct[i]['perc'] * 100), 2)) + '%' , color='k', backgroundcolor='w', alpha=0.8, fontsize=13)
    plt.show()
    # Figure 2: top row = misclassified images, bottom row = samples of the
    # breed the model predicted for each.
    fig = plt.figure(1, figsize=(35, 8))
    fig.suptitle('Wrong Predictions - TOP 10 HIGHEST %', fontsize=20)
    grid = ImageGrid(fig, 111, nrows_ncols=(2, num_imgs), axes_pad=0.05)
    for i in range(num_imgs * 2):
        ax = grid[i]
        ax.axis('off')
        if(i < num_imgs):
            ax.imshow(wrong_imgs[i] / 255.)
            ax.text(0, 0, 'True label', color='white', backgroundcolor='green', alpha=0.8, fontsize=13)
            ax.text(0, 20, '%s' % wrong[i]['true_label'], color='k', backgroundcolor='w', alpha=0.8, fontsize=13)
        else:
            ax.imshow(wrong_true_samples[i - num_imgs] / 255.)
            ax.text(0, 0, 'Predicted label', color='white', backgroundcolor='red', alpha=0.8, fontsize=13)
            ax.text(0, 20, '%s - %s' % (wrong[i - num_imgs]['pred_label'], round((wrong[i - num_imgs]['perc'] * 100), 2)) + '%' , color='k', backgroundcolor='w', alpha=0.8, fontsize=13)
    plt.show()
# Evaluate a randomly chosen fold's best checkpoint of the improved CNN,
# then visualize its confusion matrix and best/worst predictions.
simple_cnn_metrics = calculate_model_metrics(saved_model_prefix='simple_cnn_fold', dims=128, create_model_func=lambda : create_model_2('adam'), folds=train_folds)
plot_confusion_matrix(simple_cnn_metrics['confusion'], labels=labels, title="Confusion Matrix Simple CNN 2")
read_and_plot_correct_wrong(correct=simple_cnn_metrics['correct'], wrong=simple_cnn_metrics['wrong'])
print('Correct percentage: {}%'.format(round(simple_cnn_metrics['correct_perc'] * 100, 2)))
Correct percentage: 0.83%
from tqdm import tqdm
import cv2
# The Kaggle sample submission fixes the required test-id order and the
# breed-probability column names.
sample_submission = pd.read_csv(join(data_dir, 'sample_submission.csv'))
submission_columns = sample_submission[sample_submission.columns.difference(['id'])].columns
def get_test_samples(preprocess_func, dims, dtype=np.uint8):
    """Load every test image into a single preprocessed array.

    Args:
        preprocess_func: callable applied to each resized image.
        dims: side length to which each image is resized (square).
        dtype: dtype of the output batch array.

    Returns:
        Tuple of (X_test array shaped (test_count, dims, dims, 3),
        list of image ids in the same order).
    """
    print(f"Loading test data\n")
    X_test = np.zeros((test_count, dims, dims, 3), dtype=dtype)
    print(X_test.shape)
    test_ids = []
    for idx in tqdm(range(test_count)):
        image_id = sample_submission['id'][idx]
        raw = cv2.imread(join(test_dir, f"{image_id}.jpg"))
        X_test[idx] = preprocess_func(cv2.resize(raw, (dims, dims)))
        test_ids.append(image_id)
    return X_test, test_ids
def get_prediction_dataframes(X_test, test_ids, dims, saved_model_prefix, create_model_func):
    """Predict the test set with each fold's best checkpoint.

    Args:
        X_test: preprocessed test-image batch.
        test_ids: image ids used as the DataFrame index.
        dims: image side length (kept for interface compatibility).
        saved_model_prefix: weight-file prefix; loads "{prefix}{fold}.h5".
        create_model_func: zero-arg callable rebuilding the architecture.

    Returns:
        List of k DataFrames (one per fold), columns sorted by breed name,
        indexed by 'id'.
    """
    frames = []
    for fold_no in range(k):
        print(f"Loading best model for fold no. {fold_no + 1}")
        net = create_model_func()
        net.load_weights(join(model_save_dir, f"{saved_model_prefix}{fold_no}.h5"))
        preds = net.predict(X_test, batch_size=BATCH_SIZE, verbose=2)
        frame = pd.DataFrame(data=preds, index=test_ids, columns=np.array([*label_to_index]))
        frame = frame.sort_index(axis=1)
        frame.index.name = 'id'
        frames.append(frame)
    return frames
def create_submission_file(X_test, test_ids, name, dims, saved_model_prefix, create_model_func):
    """Write an ensemble submission CSV and return it re-read from disk.

    Averages the per-fold prediction DataFrames (grouped by id) so the final
    probabilities are the mean over the k fold models.

    Args:
        X_test: preprocessed test-image batch.
        test_ids: image ids matching X_test's order.
        name: output CSV filename (written under data_dir).
        dims: image side length, forwarded to get_prediction_dataframes.
        saved_model_prefix: weight-file prefix for the fold checkpoints.
        create_model_func: zero-arg callable rebuilding the architecture.

    Returns:
        The submission as a DataFrame, read back from the written file.
    """
    fold_frames = get_prediction_dataframes(X_test, test_ids, dims=dims,
                                            saved_model_prefix=saved_model_prefix,
                                            create_model_func=create_model_func)
    averaged = pd.concat(fold_frames).groupby(level=0).mean()
    averaged.to_csv(join(data_dir, name), encoding='utf-8', index=True)
    # Re-read from disk so the returned frame matches the file exactly.
    submission_data = pd.read_csv(join(data_dir, name))
    return submission_data
# Load the test images once, then average the five fold models' predictions
# into a single submission file.
X_test, test_ids = get_test_samples(preprocess_func=lambda s: s, dims=128)
submission_data = create_submission_file(X_test=X_test, test_ids=test_ids, name='simple_submission.csv', dims=128, saved_model_prefix='model_simple_cnn_fold', create_model_func= lambda : create_model_2('adam'))
submission_data
0%| | 4/10357 [00:00<04:36, 37.48it/s]
Loading test data (10357, 128, 128, 3)
100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 10357/10357 [00:53<00:00, 192.08it/s]
Loading best model for fold no. 1 518/518 - 3s Loading best model for fold no. 2 WARNING:tensorflow:Callbacks method `on_predict_batch_end` is slow compared to the batch time (batch time: 0.0020s vs `on_predict_batch_end` time: 0.0040s). Check your callbacks. 518/518 - 3s Loading best model for fold no. 3 518/518 - 3s Loading best model for fold no. 4 518/518 - 3s Loading best model for fold no. 5 518/518 - 3s
| id | affenpinscher | afghan_hound | african_hunting_dog | airedale | american_staffordshire_terrier | appenzeller | australian_terrier | basenji | basset | ... | toy_poodle | toy_terrier | vizsla | walker_hound | weimaraner | welsh_springer_spaniel | west_highland_white_terrier | whippet | wire-haired_fox_terrier | yorkshire_terrier | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 000621fb3cbb32d8935728e48679680e | 0.000064 | 0.004077 | 0.000197 | 0.001823 | 0.004612 | 0.001564 | 0.000410 | 0.000113 | 0.000862 | ... | 0.000790 | 0.000590 | 0.005122 | 0.013338 | 0.000745 | 0.000027 | 0.007362 | 0.004491 | 0.000181 | 0.000116 |
| 1 | 00102ee9d8eb90812350685311fe5890 | 0.008168 | 0.000020 | 0.013558 | 0.000009 | 0.001243 | 0.000093 | 0.000035 | 0.000024 | 0.000277 | ... | 0.001595 | 0.000021 | 0.000094 | 0.003868 | 0.000035 | 0.000003 | 0.000047 | 0.000908 | 0.000003 | 0.000017 |
| 2 | 0012a730dfa437f5f3613fb75efcd4ce | 0.000865 | 0.000585 | 0.000101 | 0.001102 | 0.027626 | 0.000494 | 0.000003 | 0.002172 | 0.000261 | ... | 0.022721 | 0.001998 | 0.000423 | 0.003136 | 0.000510 | 0.000269 | 0.004785 | 0.030969 | 0.001300 | 0.000009 |
| 3 | 001510bc8570bbeee98c8d80c8a95ec1 | 0.000871 | 0.010698 | 0.004396 | 0.006734 | 0.004085 | 0.005849 | 0.018347 | 0.027267 | 0.020501 | ... | 0.012595 | 0.001418 | 0.005316 | 0.003150 | 0.002536 | 0.000408 | 0.002017 | 0.007825 | 0.003881 | 0.000873 |
| 4 | 001a5f3114548acdefa3d4da05474c2e | 0.000061 | 0.000360 | 0.000121 | 0.005975 | 0.000643 | 0.001025 | 0.040158 | 0.000345 | 0.032338 | ... | 0.000926 | 0.000005 | 0.001153 | 0.000258 | 0.000741 | 0.000013 | 0.000191 | 0.002294 | 0.000898 | 0.000357 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 10352 | ffeda8623d4eee33c6d1156a2ecbfcf8 | 0.002060 | 0.001003 | 0.000372 | 0.000033 | 0.002026 | 0.000056 | 0.000078 | 0.000231 | 0.000318 | ... | 0.000683 | 0.000036 | 0.000031 | 0.003062 | 0.000019 | 0.000006 | 0.000385 | 0.013999 | 0.000052 | 0.000004 |
| 10353 | fff1ec9e6e413275984966f745a313b0 | 0.000110 | 0.005332 | 0.000157 | 0.019785 | 0.002992 | 0.004356 | 0.003990 | 0.014155 | 0.001546 | ... | 0.005515 | 0.000543 | 0.003767 | 0.000547 | 0.022557 | 0.001472 | 0.002641 | 0.020190 | 0.004028 | 0.001797 |
| 10354 | fff74b59b758bbbf13a5793182a9bbe4 | 0.000167 | 0.006971 | 0.000238 | 0.005577 | 0.001246 | 0.007994 | 0.005057 | 0.000968 | 0.001030 | ... | 0.009600 | 0.000198 | 0.006677 | 0.000267 | 0.001035 | 0.000287 | 0.008515 | 0.001153 | 0.000754 | 0.001159 |
| 10355 | fff7d50d848e8014ac1e9172dc6762a3 | 0.000567 | 0.000924 | 0.000229 | 0.000023 | 0.005878 | 0.000066 | 0.000010 | 0.000336 | 0.000009 | ... | 0.001927 | 0.000003 | 0.009985 | 0.001471 | 0.000082 | 0.000009 | 0.000271 | 0.000077 | 0.000004 | 0.000004 |
| 10356 | fffbff22c1f51e3dc80c4bf04089545b | 0.002122 | 0.000248 | 0.002016 | 0.001410 | 0.010162 | 0.000587 | 0.000478 | 0.001989 | 0.000805 | ... | 0.001607 | 0.000437 | 0.054990 | 0.003588 | 0.000115 | 0.000027 | 0.003463 | 0.001091 | 0.000836 | 0.000036 |
10357 rows × 121 columns
We can see that our model misclassifies mainly because of overfitting.
The changes that will help us the most are those that fight the overfitting problem
while keeping our model's training time reasonable.
# NOTE(review): filename is spelled "statisfied.png" — presumably matching the
# actual file bundled with the notebook; left as-is.
display_image("statisfied.png")
from sklearn.model_selection import KFold, StratifiedKFold
def generate_weighted_kfold(X, Y, k=5, data=None):
    """Split the labeled dataset into k stratified (breed-balanced) folds.

    Parameters
    ----------
    X : array-like
        Placeholder feature array; StratifiedKFold only uses its length,
        so the caller passes ``np.zeros(train_count)``.
    Y : array-like
        Class labels used for stratification, so each fold keeps roughly
        equal breed proportions.
    k : int, optional
        Number of folds (default 5).
    data : pandas.DataFrame, optional
        DataFrame sliced into train/valid partitions by the fold indices.
        Defaults to the notebook-level ``labeled_data`` global — the
        original implementation relied on that global implicitly; the
        parameter makes the dependency explicit while staying
        backward-compatible.

    Returns
    -------
    list of dict
        One ``{'train': DataFrame, 'valid': DataFrame}`` entry per fold.
    """
    if data is None:
        data = labeled_data  # backward-compatible fallback to the notebook global
    # Fixed random_state so fold membership is reproducible across runs.
    kf = StratifiedKFold(n_splits=k, shuffle=True, random_state=1)
    folds = []
    for idx, (train_index, val_index) in enumerate(kf.split(X, Y)):
        training_data = data.iloc[train_index]
        validation_data = data.iloc[val_index]
        folds.append({'train': training_data, 'valid': validation_data})
        print(f"Fold {idx + 1} => train count: {len(folds[idx]['train'])}, valid count: {len(folds[idx]['valid'])}")
        # Per-breed counts in the validation fold, to eyeball stratification quality.
        print(folds[idx]['valid'].groupby(['breed']).agg(['count']))
    return folds
# X is a dummy placeholder (StratifiedKFold derives the splits from Y alone);
# train_count and Y_train are defined in earlier notebook cells.
train_weighted_folds = generate_weighted_kfold(np.zeros(train_count), Y_train)
Fold 1 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 16
afghan_hound 23
african_hunting_dog 17
airedale 21
american_staffordshire_terrier 15
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 17
whippet 19
wire-haired_fox_terrier 17
yorkshire_terrier 16
[120 rows x 1 columns]
Fold 2 => train count: 8177, valid count: 2045
id
count
breed
affenpinscher 16
afghan_hound 23
african_hunting_dog 17
airedale 21
american_staffordshire_terrier 15
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 16
whippet 19
wire-haired_fox_terrier 17
yorkshire_terrier 16
[120 rows x 1 columns]
Fold 3 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 16
afghan_hound 23
african_hunting_dog 17
airedale 22
american_staffordshire_terrier 15
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 16
whippet 19
wire-haired_fox_terrier 16
yorkshire_terrier 16
[120 rows x 1 columns]
Fold 4 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 16
afghan_hound 24
african_hunting_dog 17
airedale 22
american_staffordshire_terrier 14
... ...
welsh_springer_spaniel 15
west_highland_white_terrier 16
whippet 19
wire-haired_fox_terrier 16
yorkshire_terrier 17
[120 rows x 1 columns]
Fold 5 => train count: 8178, valid count: 2044
id
count
breed
affenpinscher 16
afghan_hound 23
african_hunting_dog 18
airedale 21
american_staffordshire_terrier 15
... ...
welsh_springer_spaniel 16
west_highland_white_terrier 16
whippet 19
wire-haired_fox_terrier 16
yorkshire_terrier 17
[120 rows x 1 columns]
The goal now is to apply the improvements suggested above in order to overcome the overfitting problem we encountered in the first iteration of the model.
# Add dropout layers in the fully connected layers to avoid overfitting
def create_improved_model(optimizer):
    """Build and compile the improved CNN for 120-way breed classification.

    Five conv blocks (16 -> 32 -> 64 -> 128 -> 256 filters, each
    Conv -> BatchNorm -> MaxPool) followed by global average pooling and a
    dropout-regularized dense head ending in a 120-unit softmax.

    Parameters
    ----------
    optimizer : str or keras optimizer instance
        Passed straight to ``model.compile``.

    Returns
    -------
    A compiled keras ``Model`` (categorical cross-entropy loss,
    accuracy metric).
    """
    inp = Input((224, 224, 3))  # increased expected input size to better preserve features
    # Block 1
    x = Conv2D(16, (3, 3), activation='relu')(inp)
    x = BatchNormalization(axis=3)(x)
    x = MaxPool2D(pool_size=(2, 2), strides=2)(x)
    # Block 2
    x = Conv2D(32, (3, 3), activation='relu')(x)
    x = BatchNormalization(axis=3)(x)
    x = MaxPool2D(pool_size=(2, 2), strides=2)(x)
    # Block 3
    x = Conv2D(64, (3, 3), activation='relu')(x)
    x = BatchNormalization(axis=3)(x)
    x = MaxPool2D(pool_size=(2, 2), strides=2)(x)
    # Block 4 — 128-filter layer replaces the old 48-filter one to keep the
    # exponential filter-count progression consistent.
    x = Conv2D(128, (3, 3), activation='relu')(x)
    x = BatchNormalization(axis=3)(x)
    x = MaxPool2D(pool_size=(2, 2), strides=2)(x)
    # Block 5
    x = Conv2D(256, (3, 3), activation='relu')(x)
    x = BatchNormalization(axis=3)(x)
    x = MaxPool2D(pool_size=(2, 2), strides=2)(x)
    # Global average pooling already yields a flat (None, 256) tensor, so the
    # Flatten() the previous version inserted here was a no-op and is removed.
    x = GlobalAveragePooling2D()(x)
    # Fully connected head — dropout fights the memorization/overfitting seen
    # in the previous model; the 1024-unit layer was dropped to cut parameters
    # and speed up training.
    x = Dropout(0.5)(x)
    x = Dense(512, activation='relu')(x)
    x = Dropout(0.5)(x)
    x = Dense(256, activation='relu')(x)
    x = Dense(120, activation='softmax')(x)  # one unit per breed
    model = Model(inputs=inp, outputs=x)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model
# Instantiate a throwaway model just to print the layer/parameter summary.
create_improved_model('adam').summary()
Model: "functional_13" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_7 (InputLayer) [(None, 224, 224, 3)] 0 _________________________________________________________________ conv2d_30 (Conv2D) (None, 222, 222, 16) 448 _________________________________________________________________ batch_normalization_30 (Batc (None, 222, 222, 16) 64 _________________________________________________________________ max_pooling2d_26 (MaxPooling (None, 111, 111, 16) 0 _________________________________________________________________ conv2d_31 (Conv2D) (None, 109, 109, 32) 4640 _________________________________________________________________ batch_normalization_31 (Batc (None, 109, 109, 32) 128 _________________________________________________________________ max_pooling2d_27 (MaxPooling (None, 54, 54, 32) 0 _________________________________________________________________ conv2d_32 (Conv2D) (None, 52, 52, 64) 18496 _________________________________________________________________ batch_normalization_32 (Batc (None, 52, 52, 64) 256 _________________________________________________________________ max_pooling2d_28 (MaxPooling (None, 26, 26, 64) 0 _________________________________________________________________ conv2d_33 (Conv2D) (None, 24, 24, 128) 73856 _________________________________________________________________ batch_normalization_33 (Batc (None, 24, 24, 128) 512 _________________________________________________________________ max_pooling2d_29 (MaxPooling (None, 12, 12, 128) 0 _________________________________________________________________ conv2d_34 (Conv2D) (None, 10, 10, 256) 295168 _________________________________________________________________ batch_normalization_34 (Batc (None, 10, 10, 256) 1024 _________________________________________________________________ max_pooling2d_30 (MaxPooling (None, 5, 5, 256) 0 
_________________________________________________________________ global_average_pooling2d_6 ( (None, 256) 0 _________________________________________________________________ flatten_6 (Flatten) (None, 256) 0 _________________________________________________________________ dropout_12 (Dropout) (None, 256) 0 _________________________________________________________________ dense_18 (Dense) (None, 512) 131584 _________________________________________________________________ dropout_13 (Dropout) (None, 512) 0 _________________________________________________________________ dense_19 (Dense) (None, 256) 131328 _________________________________________________________________ dense_20 (Dense) (None, 120) 30840 ================================================================= Total params: 688,344 Trainable params: 687,352 Non-trainable params: 992 _________________________________________________________________
large_image_size = (224, 224)  # target (height, width) for flow_from_dataframe; matches the model's Input shape
rescale_gen = ImageDataGenerator(rescale=1./255) # normalize input to 0-1 to increase convergence speed
BATCH_SIZE = 20  # images per generator batch (409 steps/epoch over ~8177 training images)
def train_improved_simple_cnn_model(folds, train_image_generator, valid_image_generator, epochs, dims, prefix='improved'):
    """Train the improved simple CNN once per cross-validation fold.

    Parameters
    ----------
    folds : list of dict
        Each item holds 'train' and 'valid' DataFrames with 'id' (filename)
        and 'breed' (label) columns.
    train_image_generator, valid_image_generator : ImageDataGenerator
        Generators used to stream training / validation images from disk.
    epochs : int
        Maximum epochs per fold (early stopping in the callbacks may halt sooner).
    dims : tuple of int
        Target (height, width) the images are resized to.
    prefix : str
        Prefix used in the saved-model checkpoint name for each fold.

    Returns
    -------
    list
        One Keras History object per fold.
    """
    hist = []
    for idx, fold in enumerate(folds):
        print(f'\n\nFitting model with fold no. {idx + 1}')
        # Drop graph/session state from the previous fold so memory does not
        # accumulate across the k models built in this loop.
        tf.keras.backend.clear_session()
        train_data_generator = train_image_generator.flow_from_dataframe(
            dataframe=fold['train'], target_size=dims, directory=train_dir,
            x_col='id', y_col='breed', class_mode='categorical',
            shuffle=True, batch_size=BATCH_SIZE)
        # Validation data needs no shuffling; a fixed order keeps evaluation
        # deterministic and predictions aligned with their labels.
        valid_data_generator = valid_image_generator.flow_from_dataframe(
            dataframe=fold['valid'], target_size=dims, directory=train_dir,
            x_col='id', y_col='breed', class_mode='categorical',
            shuffle=False, batch_size=BATCH_SIZE)
        callbacks = get_callbacks(saved_model_name=f'{prefix}_simple_cnn_fold{idx}')
        # NOTE: 0.001 is Adam's Keras default learning rate; passing it
        # explicitly documents the choice (the old comment wrongly said the
        # default was 0.01).
        model = create_improved_model(optimizer=Adam(learning_rate=0.001))
        history = model.fit(x=train_data_generator, validation_data=valid_data_generator,
                            callbacks=callbacks, epochs=epochs, verbose=2)
        hist.append(history)
    return hist
# Train for up to 40 epochs with an explicit lr of 0.001 (NOTE(review): this
# equals Adam's Keras default, so the lr was made explicit rather than lowered);
# early stopping in the callbacks ends each fold sooner if val_loss stalls.
improved_cnn_history = train_improved_simple_cnn_model(folds=train_weighted_folds, train_image_generator=rescale_gen, valid_image_generator=rescale_gen, epochs=40, dims=large_image_size)
Fitting model with fold no. 1 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.78216, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 33s - loss: 4.8001 - accuracy: 0.0192 - val_loss: 4.7822 - val_accuracy: 0.0137 Epoch 2/40 Epoch 00002: val_loss improved from 4.78216 to 4.70106, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 34s - loss: 4.6245 - accuracy: 0.0295 - val_loss: 4.7011 - val_accuracy: 0.0249 Epoch 3/40 Epoch 00003: val_loss did not improve from 4.70106 409/409 - 35s - loss: 4.4822 - accuracy: 0.0364 - val_loss: 4.7314 - val_accuracy: 0.0274 Epoch 4/40 Epoch 00004: val_loss improved from 4.70106 to 4.49060, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 39s - loss: 4.3613 - accuracy: 0.0446 - val_loss: 4.4906 - val_accuracy: 0.0381 Epoch 5/40 Epoch 00005: val_loss improved from 4.49060 to 4.38725, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 42s - loss: 4.2376 - accuracy: 0.0494 - val_loss: 4.3873 - val_accuracy: 0.0474 Epoch 6/40 Epoch 00006: val_loss improved from 4.38725 to 4.08425, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 36s - loss: 4.1356 - accuracy: 0.0613 - val_loss: 4.0842 - val_accuracy: 0.0743 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.08425 409/409 - 45s - loss: 4.0247 - accuracy: 0.0739 - val_loss: 
4.1185 - val_accuracy: 0.0763 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.08425 409/409 - 125s - loss: 3.9086 - accuracy: 0.0849 - val_loss: 4.2106 - val_accuracy: 0.0680 Epoch 9/40 Epoch 00009: val_loss did not improve from 4.08425 409/409 - 205s - loss: 3.8106 - accuracy: 0.0988 - val_loss: 4.1058 - val_accuracy: 0.0787 Epoch 10/40 Epoch 00010: val_loss improved from 4.08425 to 3.88472, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 164s - loss: 3.7303 - accuracy: 0.1028 - val_loss: 3.8847 - val_accuracy: 0.0939 Epoch 11/40 Epoch 00011: val_loss did not improve from 3.88472 409/409 - 25s - loss: 3.6267 - accuracy: 0.1170 - val_loss: 4.2621 - val_accuracy: 0.0694 Epoch 12/40 Epoch 00012: val_loss did not improve from 3.88472 409/409 - 26s - loss: 3.5452 - accuracy: 0.1208 - val_loss: 4.2460 - val_accuracy: 0.0743 Epoch 13/40 Epoch 00013: val_loss improved from 3.88472 to 3.78958, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 27s - loss: 3.4210 - accuracy: 0.1465 - val_loss: 3.7896 - val_accuracy: 0.1134 Epoch 14/40 Epoch 00014: val_loss improved from 3.78958 to 3.75723, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 28s - loss: 3.3549 - accuracy: 0.1527 - val_loss: 3.7572 - val_accuracy: 0.1257 Epoch 15/40 Epoch 00015: val_loss did not improve from 3.75723 409/409 - 28s - loss: 3.2568 - accuracy: 0.1701 - val_loss: 3.8203 - val_accuracy: 0.1262 Epoch 16/40 Epoch 00016: val_loss improved from 3.75723 to 3.65659, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 30s - loss: 3.1566 - 
accuracy: 0.1845 - val_loss: 3.6566 - val_accuracy: 0.1408 Epoch 17/40 Epoch 00017: val_loss improved from 3.65659 to 3.52204, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold0.h5 409/409 - 31s - loss: 3.0886 - accuracy: 0.1898 - val_loss: 3.5220 - val_accuracy: 0.1711 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.52204 409/409 - 30s - loss: 2.9707 - accuracy: 0.2190 - val_loss: 3.7977 - val_accuracy: 0.1330 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.52204 409/409 - 30s - loss: 2.8630 - accuracy: 0.2303 - val_loss: 3.7020 - val_accuracy: 0.1477 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.52204 409/409 - 30s - loss: 2.7860 - accuracy: 0.2511 - val_loss: 3.7223 - val_accuracy: 0.1575 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.52204 409/409 - 30s - loss: 2.6787 - accuracy: 0.2651 - val_loss: 3.6156 - val_accuracy: 0.1648 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.52204 409/409 - 29s - loss: 2.6011 - accuracy: 0.2766 - val_loss: 4.0482 - val_accuracy: 0.1340 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.52204 409/409 - 29s - loss: 2.5306 - accuracy: 0.2942 - val_loss: 3.7230 - val_accuracy: 0.1736 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.52204 409/409 - 30s - loss: 2.4278 - accuracy: 0.3103 - val_loss: 4.1694 - val_accuracy: 0.1198 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.52204 409/409 - 29s - loss: 2.3533 - accuracy: 0.3303 - val_loss: 4.1343 - val_accuracy: 0.1575 Epoch 00025: early stopping Fitting model with fold no. 2 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/40 WARNING:tensorflow:Callbacks method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0219s vs `on_train_batch_end` time: 0.0349s). Check your callbacks. 
Epoch 00001: val_loss improved from inf to 4.98603, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 4.8244 - accuracy: 0.0164 - val_loss: 4.9860 - val_accuracy: 0.0142 Epoch 2/40 Epoch 00002: val_loss improved from 4.98603 to 4.88914, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 4.6498 - accuracy: 0.0263 - val_loss: 4.8891 - val_accuracy: 0.0142 Epoch 3/40 Epoch 00003: val_loss improved from 4.88914 to 4.68354, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 29s - loss: 4.4822 - accuracy: 0.0371 - val_loss: 4.6835 - val_accuracy: 0.0279 Epoch 4/40 Epoch 00004: val_loss improved from 4.68354 to 4.49784, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 4.3544 - accuracy: 0.0470 - val_loss: 4.4978 - val_accuracy: 0.0401 Epoch 5/40 Epoch 00005: val_loss improved from 4.49784 to 4.32423, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 50s - loss: 4.2354 - accuracy: 0.0536 - val_loss: 4.3242 - val_accuracy: 0.0469 Epoch 6/40 Epoch 00006: val_loss improved from 4.32423 to 4.26652, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 157s - loss: 4.1348 - accuracy: 0.0600 - val_loss: 4.2665 - val_accuracy: 0.0557 Epoch 7/40 Epoch 00007: val_loss improved from 4.26652 to 4.20099, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 29s - loss: 4.0368 - accuracy: 0.0671 - val_loss: 4.2010 - val_accuracy: 0.0621 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.20099 409/409 - 29s - loss: 3.9410 - accuracy: 0.0807 - val_loss: 4.2047 - val_accuracy: 0.0758 Epoch 9/40 Epoch 00009: val_loss improved from 4.20099 to 3.92050, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 3.8481 - accuracy: 0.0861 - val_loss: 3.9205 - val_accuracy: 0.0861 Epoch 10/40 Epoch 00010: val_loss did not improve from 3.92050 409/409 - 28s - loss: 3.7682 - accuracy: 0.0980 - val_loss: 4.0172 - val_accuracy: 0.0885 Epoch 11/40 Epoch 00011: val_loss did not improve from 3.92050 409/409 - 29s - loss: 3.6647 - accuracy: 0.1109 - val_loss: 3.9999 - val_accuracy: 0.0924 Epoch 12/40 Epoch 00012: val_loss did not improve from 3.92050 409/409 - 29s - loss: 3.5600 - accuracy: 0.1278 - val_loss: 4.0016 - val_accuracy: 0.0910 Epoch 13/40 Epoch 00013: val_loss did not improve from 3.92050 409/409 - 29s - loss: 3.4757 - accuracy: 0.1339 - val_loss: 3.9408 - val_accuracy: 0.1061 Epoch 14/40 Epoch 00014: val_loss improved from 3.92050 to 3.85984, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 28s - loss: 3.3847 - accuracy: 0.1476 - val_loss: 3.8598 - val_accuracy: 0.1183 Epoch 15/40 Epoch 00015: val_loss improved from 3.85984 to 3.83076, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 28s - loss: 3.2734 - accuracy: 0.1611 - val_loss: 3.8308 - val_accuracy: 0.1315 Epoch 16/40 Epoch 00016: val_loss improved from 3.83076 to 3.68356, saving model to C:\My 
Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 3.1870 - accuracy: 0.1801 - val_loss: 3.6836 - val_accuracy: 0.1369 Epoch 17/40 Epoch 00017: val_loss improved from 3.68356 to 3.49782, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold1.h5 409/409 - 30s - loss: 3.1030 - accuracy: 0.1920 - val_loss: 3.4978 - val_accuracy: 0.1638 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.49782 409/409 - 30s - loss: 2.9905 - accuracy: 0.2063 - val_loss: 3.8569 - val_accuracy: 0.1257 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.49782 409/409 - 30s - loss: 2.8937 - accuracy: 0.2239 - val_loss: 3.6637 - val_accuracy: 0.1467 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.49782 409/409 - 31s - loss: 2.8061 - accuracy: 0.2440 - val_loss: 3.7840 - val_accuracy: 0.1364 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.49782 409/409 - 30s - loss: 2.7343 - accuracy: 0.2495 - val_loss: 3.6415 - val_accuracy: 0.1614 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.49782 409/409 - 31s - loss: 2.6300 - accuracy: 0.2753 - val_loss: 3.6018 - val_accuracy: 0.1633 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.49782 409/409 - 31s - loss: 2.5414 - accuracy: 0.2911 - val_loss: 3.9696 - val_accuracy: 0.1506 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.49782 409/409 - 29s - loss: 2.4892 - accuracy: 0.2984 - val_loss: 3.7392 - val_accuracy: 0.1638 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.49782 409/409 - 29s - loss: 2.4000 - accuracy: 0.3142 - val_loss: 3.9434 - val_accuracy: 0.1550 Epoch 00025: early stopping Fitting model with fold no. 3 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. 
Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.74913, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.8167 - accuracy: 0.0150 - val_loss: 4.7491 - val_accuracy: 0.0176 Epoch 2/40 Epoch 00002: val_loss improved from 4.74913 to 4.65389, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.6528 - accuracy: 0.0245 - val_loss: 4.6539 - val_accuracy: 0.0269 Epoch 3/40 Epoch 00003: val_loss improved from 4.65389 to 4.46439, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.5209 - accuracy: 0.0303 - val_loss: 4.4644 - val_accuracy: 0.0303 Epoch 4/40 Epoch 00004: val_loss improved from 4.46439 to 4.36432, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.3689 - accuracy: 0.0449 - val_loss: 4.3643 - val_accuracy: 0.0479 Epoch 5/40 Epoch 00005: val_loss improved from 4.36432 to 4.27713, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.2509 - accuracy: 0.0564 - val_loss: 4.2771 - val_accuracy: 0.0616 Epoch 6/40 Epoch 00006: val_loss improved from 4.27713 to 4.11593, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 4.1503 - accuracy: 0.0620 - val_loss: 4.1159 - val_accuracy: 0.0651 Epoch 7/40 Epoch 00007: val_loss improved from 4.11593 to 4.10984, saving model to C:\My 
Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 4.0575 - accuracy: 0.0688 - val_loss: 4.1098 - val_accuracy: 0.0631 Epoch 8/40 Epoch 00008: val_loss improved from 4.10984 to 4.05670, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 31s - loss: 3.9762 - accuracy: 0.0808 - val_loss: 4.0567 - val_accuracy: 0.0812 Epoch 9/40 Epoch 00009: val_loss did not improve from 4.05670 409/409 - 29s - loss: 3.9029 - accuracy: 0.0901 - val_loss: 4.3464 - val_accuracy: 0.0577 Epoch 10/40 Epoch 00010: val_loss improved from 4.05670 to 3.97742, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 30s - loss: 3.8328 - accuracy: 0.0987 - val_loss: 3.9774 - val_accuracy: 0.0856 Epoch 11/40 Epoch 00011: val_loss improved from 3.97742 to 3.91024, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 3.7341 - accuracy: 0.1034 - val_loss: 3.9102 - val_accuracy: 0.0983 Epoch 12/40 Epoch 00012: val_loss improved from 3.91024 to 3.89061, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 3.6566 - accuracy: 0.1130 - val_loss: 3.8906 - val_accuracy: 0.1067 Epoch 13/40 Epoch 00013: val_loss did not improve from 3.89061 409/409 - 28s - loss: 3.5561 - accuracy: 0.1278 - val_loss: 3.9689 - val_accuracy: 0.0939 Epoch 14/40 Epoch 00014: val_loss improved from 3.89061 to 3.71772, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 3.4557 - accuracy: 0.1461 - val_loss: 3.7177 - val_accuracy: 0.1076 Epoch 15/40 Epoch 00015: val_loss improved from 3.71772 to 3.60087, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 3.3747 - accuracy: 0.1522 - val_loss: 3.6009 - val_accuracy: 0.1355 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.60087 409/409 - 29s - loss: 3.2829 - accuracy: 0.1636 - val_loss: 3.9889 - val_accuracy: 0.1032 Epoch 17/40 Epoch 00017: val_loss improved from 3.60087 to 3.52477, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 3.2014 - accuracy: 0.1746 - val_loss: 3.5248 - val_accuracy: 0.1414 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.52477 409/409 - 29s - loss: 3.1130 - accuracy: 0.1922 - val_loss: 3.5436 - val_accuracy: 0.1419 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.52477 409/409 - 29s - loss: 3.0056 - accuracy: 0.2100 - val_loss: 3.6238 - val_accuracy: 0.1409 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.52477 409/409 - 29s - loss: 2.9157 - accuracy: 0.2285 - val_loss: 3.6128 - val_accuracy: 0.1522 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.52477 409/409 - 29s - loss: 2.8565 - accuracy: 0.2323 - val_loss: 3.5350 - val_accuracy: 0.1580 Epoch 22/40 Epoch 00022: val_loss improved from 3.52477 to 3.51674, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold2.h5 409/409 - 29s - loss: 2.7373 - accuracy: 0.2602 - val_loss: 3.5167 - val_accuracy: 0.1781 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.51674 409/409 - 28s - loss: 2.6545 - accuracy: 
0.2667 - val_loss: 4.1272 - val_accuracy: 0.1282 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.51674 409/409 - 28s - loss: 2.5662 - accuracy: 0.2843 - val_loss: 3.6907 - val_accuracy: 0.1600 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.51674 409/409 - 29s - loss: 2.4837 - accuracy: 0.3069 - val_loss: 3.7615 - val_accuracy: 0.1502 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.51674 409/409 - 29s - loss: 2.4383 - accuracy: 0.3189 - val_loss: 3.9222 - val_accuracy: 0.1394 Epoch 27/40 Epoch 00027: val_loss did not improve from 3.51674 409/409 - 29s - loss: 2.3389 - accuracy: 0.3338 - val_loss: 4.2092 - val_accuracy: 0.1321 Epoch 28/40 Epoch 00028: val_loss did not improve from 3.51674 409/409 - 29s - loss: 2.2954 - accuracy: 0.3503 - val_loss: 3.6946 - val_accuracy: 0.1634 Epoch 29/40 Epoch 00029: val_loss did not improve from 3.51674 409/409 - 28s - loss: 2.2201 - accuracy: 0.3606 - val_loss: 4.0377 - val_accuracy: 0.1566 Epoch 30/40 Epoch 00030: val_loss did not improve from 3.51674 409/409 - 29s - loss: 2.1687 - accuracy: 0.3755 - val_loss: 3.7200 - val_accuracy: 0.1703 Epoch 00030: early stopping Fitting model with fold no. 4 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. 
Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.75801, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 4.8200 - accuracy: 0.0138 - val_loss: 4.7580 - val_accuracy: 0.0152 Epoch 2/40 Epoch 00002: val_loss improved from 4.75801 to 4.57313, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 4.6399 - accuracy: 0.0248 - val_loss: 4.5731 - val_accuracy: 0.0289 Epoch 3/40 Epoch 00003: val_loss improved from 4.57313 to 4.37511, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 4.5118 - accuracy: 0.0318 - val_loss: 4.3751 - val_accuracy: 0.0435 Epoch 4/40 Epoch 00004: val_loss did not improve from 4.37511 409/409 - 29s - loss: 4.3678 - accuracy: 0.0445 - val_loss: 4.4802 - val_accuracy: 0.0416 Epoch 5/40 Epoch 00005: val_loss did not improve from 4.37511 409/409 - 29s - loss: 4.2529 - accuracy: 0.0540 - val_loss: 4.3935 - val_accuracy: 0.0440 Epoch 6/40 Epoch 00006: val_loss improved from 4.37511 to 4.04088, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 4.1488 - accuracy: 0.0616 - val_loss: 4.0409 - val_accuracy: 0.0705 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.04088 409/409 - 29s - loss: 4.0189 - accuracy: 0.0697 - val_loss: 4.1535 - val_accuracy: 0.0602 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.04088 409/409 - 29s - loss: 3.9302 - accuracy: 0.0809 - val_loss: 4.0517 - val_accuracy: 0.0719 Epoch 9/40 Epoch 00009: val_loss did not improve from 4.04088 409/409 - 29s - loss: 3.8356 - accuracy: 0.0932 - val_loss: 4.1696 - 
val_accuracy: 0.0812 Epoch 10/40 Epoch 00010: val_loss improved from 4.04088 to 3.87302, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 3.7309 - accuracy: 0.1080 - val_loss: 3.8730 - val_accuracy: 0.1003 Epoch 11/40 Epoch 00011: val_loss improved from 3.87302 to 3.80904, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 3.6298 - accuracy: 0.1231 - val_loss: 3.8090 - val_accuracy: 0.1076 Epoch 12/40 Epoch 00012: val_loss improved from 3.80904 to 3.79714, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 28s - loss: 3.5461 - accuracy: 0.1251 - val_loss: 3.7971 - val_accuracy: 0.1057 Epoch 13/40 Epoch 00013: val_loss improved from 3.79714 to 3.67418, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 3.4567 - accuracy: 0.1409 - val_loss: 3.6742 - val_accuracy: 0.1189 Epoch 14/40 Epoch 00014: val_loss did not improve from 3.67418 409/409 - 29s - loss: 3.3364 - accuracy: 0.1588 - val_loss: 4.1159 - val_accuracy: 0.0969 Epoch 15/40 Epoch 00015: val_loss did not improve from 3.67418 409/409 - 29s - loss: 3.2594 - accuracy: 0.1668 - val_loss: 4.0615 - val_accuracy: 0.0983 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.67418 409/409 - 29s - loss: 3.1677 - accuracy: 0.1821 - val_loss: 3.8398 - val_accuracy: 0.1213 Epoch 17/40 Epoch 00017: val_loss did not improve from 3.67418 409/409 - 28s - loss: 3.0398 - accuracy: 0.2011 - val_loss: 3.7839 - val_accuracy: 0.1184 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.67418 409/409 - 29s - loss: 2.9422 - accuracy: 
0.2174 - val_loss: 3.8328 - val_accuracy: 0.1267 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.67418 409/409 - 29s - loss: 2.8453 - accuracy: 0.2339 - val_loss: 3.6782 - val_accuracy: 0.1575 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.67418 409/409 - 29s - loss: 2.7705 - accuracy: 0.2493 - val_loss: 3.9813 - val_accuracy: 0.1409 Epoch 21/40 Epoch 00021: val_loss improved from 3.67418 to 3.63140, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 2.6814 - accuracy: 0.2657 - val_loss: 3.6314 - val_accuracy: 0.1585 Epoch 22/40 Epoch 00022: val_loss improved from 3.63140 to 3.58434, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold3.h5 409/409 - 29s - loss: 2.6127 - accuracy: 0.2822 - val_loss: 3.5843 - val_accuracy: 0.1742 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.58434 409/409 - 29s - loss: 2.5355 - accuracy: 0.2975 - val_loss: 3.8096 - val_accuracy: 0.1619 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.58434 409/409 - 28s - loss: 2.4558 - accuracy: 0.3058 - val_loss: 4.7970 - val_accuracy: 0.1316 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.58434 409/409 - 28s - loss: 2.3851 - accuracy: 0.3278 - val_loss: 3.7053 - val_accuracy: 0.1649 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.58434 409/409 - 29s - loss: 2.3050 - accuracy: 0.3470 - val_loss: 3.6827 - val_accuracy: 0.1707 Epoch 27/40 Epoch 00027: val_loss did not improve from 3.58434 409/409 - 28s - loss: 2.2319 - accuracy: 0.3557 - val_loss: 4.1582 - val_accuracy: 0.1458 Epoch 28/40 Epoch 00028: val_loss did not improve from 3.58434 409/409 - 29s - loss: 2.1522 - accuracy: 0.3772 - val_loss: 4.0094 - val_accuracy: 0.1737 Epoch 29/40 Epoch 00029: val_loss did not improve from 3.58434 409/409 - 29s - loss: 2.0896 - 
accuracy: 0.3855 - val_loss: 3.7839 - val_accuracy: 0.1800 Epoch 30/40 Epoch 00030: val_loss did not improve from 3.58434 409/409 - 29s - loss: 2.0349 - accuracy: 0.4051 - val_loss: 3.9521 - val_accuracy: 0.1570 Epoch 00030: early stopping Fitting model with fold no. 5 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/40 WARNING:tensorflow:Callbacks method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0200s vs `on_train_batch_end` time: 0.0319s). Check your callbacks. Epoch 00001: val_loss improved from inf to 4.91452, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 29s - loss: 4.8257 - accuracy: 0.0149 - val_loss: 4.9145 - val_accuracy: 0.0098 Epoch 2/40 Epoch 00002: val_loss improved from 4.91452 to 4.85273, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 29s - loss: 4.6568 - accuracy: 0.0251 - val_loss: 4.8527 - val_accuracy: 0.0166 Epoch 3/40 Epoch 00003: val_loss improved from 4.85273 to 4.48550, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 29s - loss: 4.5437 - accuracy: 0.0335 - val_loss: 4.4855 - val_accuracy: 0.0372 Epoch 4/40 Epoch 00004: val_loss improved from 4.48550 to 4.39166, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 31s - loss: 4.4246 - accuracy: 0.0418 - val_loss: 4.3917 - val_accuracy: 0.0494 Epoch 5/40 Epoch 00005: val_loss improved from 4.39166 to 4.32705, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 29s - loss: 4.2917 - accuracy: 0.0511 - val_loss: 4.3270 - val_accuracy: 0.0494 Epoch 6/40 Epoch 00006: val_loss improved from 4.32705 to 4.22838, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 4.1727 - accuracy: 0.0583 - val_loss: 4.2284 - val_accuracy: 0.0616 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.22838 409/409 - 28s - loss: 4.0537 - accuracy: 0.0688 - val_loss: 4.6212 - val_accuracy: 0.0445 Epoch 8/40 Epoch 00008: val_loss improved from 4.22838 to 4.12496, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 3.9462 - accuracy: 0.0808 - val_loss: 4.1250 - val_accuracy: 0.0758 Epoch 9/40 Epoch 00009: val_loss improved from 4.12496 to 3.97926, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 3.8313 - accuracy: 0.0962 - val_loss: 3.9793 - val_accuracy: 0.0832 Epoch 10/40 Epoch 00010: val_loss did not improve from 3.97926 409/409 - 28s - loss: 3.7296 - accuracy: 0.1070 - val_loss: 3.9993 - val_accuracy: 0.0797 Epoch 11/40 Epoch 00011: val_loss improved from 3.97926 to 3.82586, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 3.6330 - accuracy: 0.1173 - val_loss: 3.8259 - val_accuracy: 0.1047 Epoch 12/40 Epoch 00012: val_loss did not improve from 3.82586 409/409 - 28s - loss: 3.5400 - accuracy: 0.1269 - val_loss: 3.8563 - val_accuracy: 0.1081 Epoch 13/40 Epoch 00013: val_loss improved from 3.82586 to 3.75942, saving model to C:\My 
Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 3.4198 - accuracy: 0.1451 - val_loss: 3.7594 - val_accuracy: 0.1115 Epoch 14/40 Epoch 00014: val_loss improved from 3.75942 to 3.61701, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_improved_simple_cnn_fold4.h5 409/409 - 28s - loss: 3.3423 - accuracy: 0.1613 - val_loss: 3.6170 - val_accuracy: 0.1380 Epoch 15/40 Epoch 00015: val_loss did not improve from 3.61701 409/409 - 28s - loss: 3.2581 - accuracy: 0.1640 - val_loss: 3.6877 - val_accuracy: 0.1243 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.61701 409/409 - 28s - loss: 3.1516 - accuracy: 0.1829 - val_loss: 3.8247 - val_accuracy: 0.1257 Epoch 17/40 Epoch 00017: val_loss did not improve from 3.61701 409/409 - 28s - loss: 3.0468 - accuracy: 0.2052 - val_loss: 3.8168 - val_accuracy: 0.1316 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.61701 409/409 - 28s - loss: 2.9679 - accuracy: 0.2211 - val_loss: 3.7090 - val_accuracy: 0.1370 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.61701 409/409 - 28s - loss: 2.8899 - accuracy: 0.2278 - val_loss: 4.0450 - val_accuracy: 0.1360 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.61701 409/409 - 28s - loss: 2.7816 - accuracy: 0.2444 - val_loss: 3.6265 - val_accuracy: 0.1693 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.61701 409/409 - 28s - loss: 2.6952 - accuracy: 0.2591 - val_loss: 3.8084 - val_accuracy: 0.1629 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.61701 409/409 - 28s - loss: 2.6174 - accuracy: 0.2786 - val_loss: 3.7366 - val_accuracy: 0.1639 Epoch 00022: early stopping
# Plot the per-fold training/validation curves for the improved CNN run.
plot_model_fold_stats(history=improved_cnn_history, k=k)
(5, 2)
# Print the mean last-epoch loss/accuracy (train and validation) across folds.
print_model_last_epoch_stats(history=improved_cnn_history)
Mean fold accuracy: 34.07% Mean fold loss: 2.3148712635040285 Mean fold validation accuracy: 16.07% Mean fold validation loss: 3.8972976207733154
# Evaluate the improved CNN: reload each fold's best checkpoint, collect
# predictions on the validation folds, and derive confusion / correct / wrong
# sample statistics.
def _scale_pixels(samples):
    # Normalize raw 0-255 pixel values into the 0-1 range the model was trained on.
    return samples / 255.

improved_cnn_metrics = calculate_model_metrics(
    saved_model_prefix='improved_simple_cnn_fold',
    dims=224,
    folds=train_weighted_folds,
    create_model_func=lambda: create_improved_model('adam'),
    preprocess_func=_scale_pixels,
    dtype=np.float64,
)
plot_confusion_matrix(improved_cnn_metrics['confusion'], labels=labels,
                      title="Confusion Matrix Simple CNN Improved")
read_and_plot_correct_wrong(correct=improved_cnn_metrics['correct'],
                            wrong=improved_cnn_metrics['wrong'])
correct_pct = round(improved_cnn_metrics['correct_perc'] * 100, 2)
print('Correct percentage: {}%'.format(correct_pct))
Correct percentage: 0.78%
# Load the held-out test images at 224x224, normalized to [0, 1] as float64.
X_test, test_ids = get_test_samples(dims=224, dtype=np.float64,
                                    preprocess_func=lambda pixels: pixels / 255.)
0%|▏ | 31/10357 [00:00<01:10, 147.22it/s]
Loading test data (10357, 224, 224, 3)
100%|███████████████████████████████████████████████████████████████████████████| 10357/10357 [01:18<00:00, 132.57it/s]
# Predict over the test set with each fold's best checkpoint and write the
# Kaggle submission CSV (one probability column per breed).
submission_data = create_submission_file(
    X_test=X_test,
    test_ids=test_ids,
    name='improved_submission.csv',
    dims=224,
    saved_model_prefix='model_improved_simple_cnn_fold',
    create_model_func=lambda: create_improved_model('adam'),
)
submission_data
Loading best model for fold no. 1 518/518 - 13s Loading best model for fold no. 2 518/518 - 15s Loading best model for fold no. 3 518/518 - 18s Loading best model for fold no. 4 518/518 - 22s Loading best model for fold no. 5 518/518 - 24s
| id | affenpinscher | afghan_hound | african_hunting_dog | airedale | american_staffordshire_terrier | appenzeller | australian_terrier | basenji | basset | ... | toy_poodle | toy_terrier | vizsla | walker_hound | weimaraner | welsh_springer_spaniel | west_highland_white_terrier | whippet | wire-haired_fox_terrier | yorkshire_terrier | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 000621fb3cbb32d8935728e48679680e | 2.144512e-05 | 0.000619 | 9.317542e-06 | 0.001857 | 0.000400 | 6.070149e-04 | 2.666213e-06 | 2.615361e-05 | 0.002010 | ... | 0.000356 | 9.400729e-07 | 0.004305 | 0.003872 | 0.000013 | 3.195044e-07 | 0.008729 | 0.002631 | 3.375650e-05 | 0.000002 |
| 1 | 00102ee9d8eb90812350685311fe5890 | 4.442798e-04 | 0.000004 | 4.429980e-04 | 0.000005 | 0.000160 | 1.774236e-07 | 1.793715e-10 | 4.919388e-06 | 0.000045 | ... | 0.000298 | 1.043975e-05 | 0.001048 | 0.002818 | 0.000119 | 1.280820e-05 | 0.000003 | 0.000085 | 3.426826e-08 | 0.000002 |
| 2 | 0012a730dfa437f5f3613fb75efcd4ce | 1.669309e-04 | 0.002639 | 3.984945e-05 | 0.002367 | 0.061110 | 4.573074e-04 | 3.831987e-06 | 3.432477e-04 | 0.000472 | ... | 0.000768 | 1.688881e-03 | 0.003052 | 0.043348 | 0.002629 | 3.905412e-05 | 0.049086 | 0.000904 | 1.180893e-04 | 0.000055 |
| 3 | 001510bc8570bbeee98c8d80c8a95ec1 | 1.320521e-04 | 0.001885 | 1.316597e-04 | 0.022206 | 0.001052 | 2.322877e-03 | 1.018113e-02 | 1.459216e-02 | 0.004484 | ... | 0.004191 | 1.340324e-04 | 0.009578 | 0.000485 | 0.000659 | 3.277568e-05 | 0.005122 | 0.011622 | 8.496747e-04 | 0.000119 |
| 4 | 001a5f3114548acdefa3d4da05474c2e | 1.348277e-03 | 0.001121 | 2.766325e-03 | 0.009323 | 0.001741 | 8.960483e-04 | 5.960839e-04 | 3.535169e-04 | 0.012484 | ... | 0.018997 | 6.132192e-05 | 0.009744 | 0.004944 | 0.000255 | 3.128378e-05 | 0.000807 | 0.005180 | 1.498030e-04 | 0.000252 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 10352 | ffeda8623d4eee33c6d1156a2ecbfcf8 | 4.779456e-06 | 0.000519 | 4.553959e-07 | 0.001600 | 0.003222 | 4.333176e-04 | 1.052073e-07 | 2.387645e-05 | 0.000086 | ... | 0.000107 | 6.005065e-05 | 0.001522 | 0.093911 | 0.000135 | 1.871218e-06 | 0.027570 | 0.000236 | 2.302135e-05 | 0.000011 |
| 10353 | fff1ec9e6e413275984966f745a313b0 | 8.374298e-03 | 0.008866 | 4.927144e-03 | 0.047773 | 0.001810 | 9.137831e-03 | 4.319807e-04 | 3.216693e-01 | 0.002868 | ... | 0.004483 | 6.017507e-04 | 0.000142 | 0.000196 | 0.002199 | 7.602975e-04 | 0.000150 | 0.010798 | 6.029317e-03 | 0.000300 |
| 10354 | fff74b59b758bbbf13a5793182a9bbe4 | 1.007527e-04 | 0.020113 | 3.443260e-05 | 0.037717 | 0.000610 | 3.737891e-04 | 4.820871e-03 | 2.312609e-03 | 0.002277 | ... | 0.000920 | 5.485362e-06 | 0.003570 | 0.000640 | 0.000126 | 9.018207e-07 | 0.001463 | 0.003648 | 3.090318e-04 | 0.000025 |
| 10355 | fff7d50d848e8014ac1e9172dc6762a3 | 2.010942e-07 | 0.000054 | 1.408543e-08 | 0.000093 | 0.004617 | 1.225257e-04 | 1.582808e-07 | 1.704731e-09 | 0.000007 | ... | 0.002025 | 1.222511e-05 | 0.046217 | 0.012860 | 0.000017 | 4.603556e-06 | 0.011885 | 0.000131 | 8.309052e-05 | 0.000008 |
| 10356 | fffbff22c1f51e3dc80c4bf04089545b | 4.942839e-04 | 0.001428 | 6.535852e-05 | 0.002012 | 0.003678 | 1.777375e-04 | 5.176572e-06 | 1.005039e-03 | 0.001863 | ... | 0.000793 | 1.269113e-04 | 0.001749 | 0.101669 | 0.000483 | 7.582256e-06 | 0.002502 | 0.000573 | 7.055230e-05 | 0.000250 |
10357 rows × 121 columns
# Apply data augmentation to train samples to increase available data
aug_image_gen = ImageDataGenerator(
    rescale=1. / 255,       # normalize input to 0-1 to increase convergence speed
    rotation_range=40,      # random rotations of up to 40 degrees
    width_shift_range=0.2,  # horizontal translation, up to 20% of width
    height_shift_range=0.2, # vertical translation, up to 20% of height
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',    # fill pixels exposed by transforms with nearest values
)

# Validation images are only rescaled -- no augmentation, so metrics stay comparable.
valid_image_gen = ImageDataGenerator(rescale=1. / 255)
As we can see, we chose to use pretty loose augmentation settings on our dataset due to its small size.
We will use 40 deg rotation, zoom of up to 20%, height and width shifts of up to 20%, shear of up to 20% and horizontal flipping.
# Preview the augmentation pipeline: pick a random training image and display
# several randomly augmented variants of it in a single row.
# Fixes vs. the original cell: it generated 15 augmented images but the subplot
# row had only 10 axes (zip silently dropped 5), the loop variable shadowed the
# source `img`, and a new flow() iterator was built for every preview image.
n_previews = 10

rand_train_image = random.choice(os.listdir(train_dir))
img = np.expand_dims(plt.imread(join(train_dir, rand_train_image)), 0)
plt.imshow(img[0])

# flow() yields batches of augmented images scaled by the generator; convert
# back to uint8 for display.
aug_flow = aug_image_gen.flow(img)
aug_images = [(next(aug_flow)[0] * 255).astype(np.uint8) for _ in range(n_previews)]

fig, axes = plt.subplots(1, n_previews, figsize=(20, 20))
for aug_img, ax in zip(aug_images, axes.flatten()):
    ax.imshow(aug_img)
    ax.axis('off')
plt.tight_layout()
plt.show()
# Retrain the improved simple CNN per fold, now feeding augmented training
# batches; checkpoints are saved under the "augment" prefix.
improved_cnn_history_aug = train_improved_simple_cnn_model(
    folds=train_weighted_folds,
    train_image_generator=aug_image_gen,
    valid_image_generator=valid_image_gen,
    epochs=40,
    dims=large_image_size,
    prefix="augment",
)
Fitting model with fold no. 1 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.79580, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 109s - loss: 4.8297 - accuracy: 0.0147 - val_loss: 4.7958 - val_accuracy: 0.0156 Epoch 2/40 Epoch 00002: val_loss improved from 4.79580 to 4.60787, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 104s - loss: 4.6937 - accuracy: 0.0220 - val_loss: 4.6079 - val_accuracy: 0.0225 Epoch 3/40 Epoch 00003: val_loss did not improve from 4.60787 409/409 - 103s - loss: 4.5791 - accuracy: 0.0301 - val_loss: 4.8431 - val_accuracy: 0.0152 Epoch 4/40 Epoch 00004: val_loss improved from 4.60787 to 4.51355, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 4.4813 - accuracy: 0.0364 - val_loss: 4.5135 - val_accuracy: 0.0342 Epoch 5/40 Epoch 00005: val_loss improved from 4.51355 to 4.44129, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 4.3821 - accuracy: 0.0437 - val_loss: 4.4413 - val_accuracy: 0.0435 Epoch 6/40 Epoch 00006: val_loss improved from 4.44129 to 4.23360, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 4.3166 - accuracy: 0.0497 - val_loss: 4.2336 - val_accuracy: 0.0557 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.23360 409/409 - 103s - loss: 4.2622 - accuracy: 0.0510 - val_loss: 
4.7362 - val_accuracy: 0.0284 Epoch 8/40 Epoch 00008: val_loss improved from 4.23360 to 4.12365, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 4.2177 - accuracy: 0.0583 - val_loss: 4.1236 - val_accuracy: 0.0645 Epoch 9/40 Epoch 00009: val_loss improved from 4.12365 to 4.09076, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 4.1461 - accuracy: 0.0624 - val_loss: 4.0908 - val_accuracy: 0.0743 Epoch 10/40 Epoch 00010: val_loss improved from 4.09076 to 4.02995, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 4.0834 - accuracy: 0.0681 - val_loss: 4.0300 - val_accuracy: 0.0831 Epoch 11/40 Epoch 00011: val_loss improved from 4.02995 to 3.98619, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 4.0418 - accuracy: 0.0717 - val_loss: 3.9862 - val_accuracy: 0.0709 Epoch 12/40 Epoch 00012: val_loss did not improve from 3.98619 409/409 - 103s - loss: 3.9814 - accuracy: 0.0740 - val_loss: 4.2040 - val_accuracy: 0.0587 Epoch 13/40 Epoch 00013: val_loss improved from 3.98619 to 3.86739, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.9415 - accuracy: 0.0807 - val_loss: 3.8674 - val_accuracy: 0.0973 Epoch 14/40 Epoch 00014: val_loss did not improve from 3.86739 409/409 - 102s - loss: 3.9006 - accuracy: 0.0895 - val_loss: 3.8913 - val_accuracy: 0.1027 Epoch 15/40 Epoch 00015: val_loss improved from 3.86739 to 3.78451, saving model to 
C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.8634 - accuracy: 0.0909 - val_loss: 3.7845 - val_accuracy: 0.1032 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.78451 409/409 - 103s - loss: 3.7981 - accuracy: 0.1009 - val_loss: 3.9660 - val_accuracy: 0.0802 Epoch 17/40 Epoch 00017: val_loss improved from 3.78451 to 3.73607, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 3.7831 - accuracy: 0.1106 - val_loss: 3.7361 - val_accuracy: 0.1159 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.73607 409/409 - 103s - loss: 3.7334 - accuracy: 0.1082 - val_loss: 3.9958 - val_accuracy: 0.0924 Epoch 19/40 Epoch 00019: val_loss improved from 3.73607 to 3.62614, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.7002 - accuracy: 0.1139 - val_loss: 3.6261 - val_accuracy: 0.1311 Epoch 20/40 Epoch 00020: val_loss improved from 3.62614 to 3.61545, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 3.6627 - accuracy: 0.1153 - val_loss: 3.6155 - val_accuracy: 0.1306 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.61545 409/409 - 103s - loss: 3.6481 - accuracy: 0.1212 - val_loss: 4.0349 - val_accuracy: 0.0983 Epoch 22/40 Epoch 00022: val_loss improved from 3.61545 to 3.61461, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 3.5971 - accuracy: 0.1278 - val_loss: 3.6146 - val_accuracy: 0.1443 Epoch 23/40 Epoch 00023: val_loss did not 
improve from 3.61461 409/409 - 102s - loss: 3.5767 - accuracy: 0.1311 - val_loss: 3.6496 - val_accuracy: 0.1389 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.61461 409/409 - 103s - loss: 3.5589 - accuracy: 0.1400 - val_loss: 3.8514 - val_accuracy: 0.1301 Epoch 25/40 Epoch 00025: val_loss improved from 3.61461 to 3.47642, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 102s - loss: 3.5340 - accuracy: 0.1436 - val_loss: 3.4764 - val_accuracy: 0.1545 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.47642 409/409 - 102s - loss: 3.4844 - accuracy: 0.1438 - val_loss: 3.5372 - val_accuracy: 0.1423 Epoch 27/40 Epoch 00027: val_loss did not improve from 3.47642 409/409 - 103s - loss: 3.4586 - accuracy: 0.1531 - val_loss: 3.5217 - val_accuracy: 0.1570 Epoch 28/40 Epoch 00028: val_loss did not improve from 3.47642 409/409 - 103s - loss: 3.4509 - accuracy: 0.1551 - val_loss: 3.6740 - val_accuracy: 0.1291 Epoch 29/40 Epoch 00029: val_loss did not improve from 3.47642 409/409 - 112s - loss: 3.4048 - accuracy: 0.1568 - val_loss: 3.7076 - val_accuracy: 0.1330 Epoch 30/40 Epoch 00030: val_loss improved from 3.47642 to 3.37568, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.3870 - accuracy: 0.1619 - val_loss: 3.3757 - val_accuracy: 0.1746 Epoch 31/40 Epoch 00031: val_loss did not improve from 3.37568 409/409 - 103s - loss: 3.3841 - accuracy: 0.1586 - val_loss: 3.5136 - val_accuracy: 0.1614 Epoch 32/40 Epoch 00032: val_loss did not improve from 3.37568 409/409 - 102s - loss: 3.3617 - accuracy: 0.1634 - val_loss: 3.3879 - val_accuracy: 0.1746 Epoch 33/40 Epoch 00033: val_loss did not improve from 3.37568 409/409 - 103s - loss: 3.3224 - accuracy: 0.1785 - val_loss: 3.5666 - val_accuracy: 0.1716 Epoch 34/40 Epoch 00034: 
val_loss did not improve from 3.37568 409/409 - 103s - loss: 3.3154 - accuracy: 0.1787 - val_loss: 3.5276 - val_accuracy: 0.1687 Epoch 35/40 Epoch 00035: val_loss did not improve from 3.37568 409/409 - 102s - loss: 3.3220 - accuracy: 0.1737 - val_loss: 3.6844 - val_accuracy: 0.1643 Epoch 36/40 Epoch 00036: val_loss improved from 3.37568 to 3.20643, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.2632 - accuracy: 0.1839 - val_loss: 3.2064 - val_accuracy: 0.2108 Epoch 37/40 Epoch 00037: val_loss did not improve from 3.20643 409/409 - 103s - loss: 3.2701 - accuracy: 0.1870 - val_loss: 3.2270 - val_accuracy: 0.2200 Epoch 38/40 Epoch 00038: val_loss improved from 3.20643 to 3.17893, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold0.h5 409/409 - 103s - loss: 3.2622 - accuracy: 0.1831 - val_loss: 3.1789 - val_accuracy: 0.2064 Epoch 39/40 Epoch 00039: val_loss did not improve from 3.17893 409/409 - 103s - loss: 3.2335 - accuracy: 0.1867 - val_loss: 3.2743 - val_accuracy: 0.1883 Epoch 40/40 Epoch 00040: val_loss did not improve from 3.17893 409/409 - 103s - loss: 3.1764 - accuracy: 0.1949 - val_loss: 3.2673 - val_accuracy: 0.1966 Fitting model with fold no. 2 Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. 
Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.74678, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 4.8370 - accuracy: 0.0131 - val_loss: 4.7468 - val_accuracy: 0.0171 Epoch 2/40 Epoch 00002: val_loss improved from 4.74678 to 4.66620, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 4.7237 - accuracy: 0.0190 - val_loss: 4.6662 - val_accuracy: 0.0220 Epoch 3/40 Epoch 00003: val_loss improved from 4.66620 to 4.65711, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 4.6300 - accuracy: 0.0275 - val_loss: 4.6571 - val_accuracy: 0.0259 Epoch 4/40 Epoch 00004: val_loss improved from 4.65711 to 4.46510, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 4.5091 - accuracy: 0.0346 - val_loss: 4.4651 - val_accuracy: 0.0381 Epoch 5/40 Epoch 00005: val_loss improved from 4.46510 to 4.29924, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 4.4228 - accuracy: 0.0423 - val_loss: 4.2992 - val_accuracy: 0.0469 Epoch 6/40 Epoch 00006: val_loss did not improve from 4.29924 409/409 - 104s - loss: 4.3250 - accuracy: 0.0517 - val_loss: 4.4313 - val_accuracy: 0.0504 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.29924 409/409 - 104s - loss: 4.2640 - accuracy: 0.0543 - val_loss: 4.3818 - val_accuracy: 0.0557 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.29924 409/409 - 105s - loss: 4.2027 - accuracy: 0.0580 - val_loss: 
4.8786 - val_accuracy: 0.0244 Epoch 9/40 Epoch 00009: val_loss did not improve from 4.29924 409/409 - 105s - loss: 4.1296 - accuracy: 0.0643 - val_loss: 4.5308 - val_accuracy: 0.0474 Epoch 10/40 Epoch 00010: val_loss improved from 4.29924 to 3.96133, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 4.0756 - accuracy: 0.0684 - val_loss: 3.9613 - val_accuracy: 0.0914 Epoch 11/40 Epoch 00011: val_loss improved from 3.96133 to 3.94639, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 4.0173 - accuracy: 0.0736 - val_loss: 3.9464 - val_accuracy: 0.0890 Epoch 12/40 Epoch 00012: val_loss did not improve from 3.94639 409/409 - 103s - loss: 3.9769 - accuracy: 0.0817 - val_loss: 4.0829 - val_accuracy: 0.0675 Epoch 13/40 Epoch 00013: val_loss did not improve from 3.94639 409/409 - 103s - loss: 3.9130 - accuracy: 0.0881 - val_loss: 4.0530 - val_accuracy: 0.0866 Epoch 14/40 Epoch 00014: val_loss did not improve from 3.94639 409/409 - 103s - loss: 3.8807 - accuracy: 0.0934 - val_loss: 3.9740 - val_accuracy: 0.0870 Epoch 15/40 Epoch 00015: val_loss improved from 3.94639 to 3.80035, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.8498 - accuracy: 0.0928 - val_loss: 3.8004 - val_accuracy: 0.1076 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.80035 409/409 - 103s - loss: 3.7933 - accuracy: 0.0965 - val_loss: 3.9415 - val_accuracy: 0.1002 Epoch 17/40 Epoch 00017: val_loss did not improve from 3.80035 409/409 - 103s - loss: 3.7792 - accuracy: 0.1069 - val_loss: 3.8124 - val_accuracy: 0.1134 Epoch 18/40 Epoch 00018: val_loss improved from 3.80035 to 3.74391, saving model to C:\My 
Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 102s - loss: 3.7523 - accuracy: 0.1088 - val_loss: 3.7439 - val_accuracy: 0.1193 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.74391 409/409 - 103s - loss: 3.7076 - accuracy: 0.1103 - val_loss: 4.0609 - val_accuracy: 0.0919 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.74391 409/409 - 103s - loss: 3.6912 - accuracy: 0.1158 - val_loss: 3.8911 - val_accuracy: 0.1012 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.74391 409/409 - 103s - loss: 3.6588 - accuracy: 0.1174 - val_loss: 4.1884 - val_accuracy: 0.0939 Epoch 22/40 Epoch 00022: val_loss improved from 3.74391 to 3.56960, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.6260 - accuracy: 0.1224 - val_loss: 3.5696 - val_accuracy: 0.1394 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.56960 409/409 - 103s - loss: 3.6105 - accuracy: 0.1280 - val_loss: 3.7956 - val_accuracy: 0.1159 Epoch 24/40 Epoch 00024: val_loss did not improve from 3.56960 409/409 - 103s - loss: 3.5758 - accuracy: 0.1364 - val_loss: 3.6394 - val_accuracy: 0.1330 Epoch 25/40 Epoch 00025: val_loss improved from 3.56960 to 3.43137, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.5401 - accuracy: 0.1372 - val_loss: 3.4314 - val_accuracy: 0.1663 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.43137 409/409 - 103s - loss: 3.5142 - accuracy: 0.1393 - val_loss: 3.5267 - val_accuracy: 0.1545 Epoch 27/40 Epoch 00027: val_loss did not improve from 3.43137 409/409 - 103s - loss: 3.4902 - accuracy: 0.1465 - val_loss: 3.5626 - val_accuracy: 0.1477 Epoch 28/40 Epoch 00028: val_loss did not improve from 3.43137 
409/409 - 103s - loss: 3.4759 - accuracy: 0.1503 - val_loss: 3.4628 - val_accuracy: 0.1555 Epoch 29/40 Epoch 00029: val_loss improved from 3.43137 to 3.42633, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.4839 - accuracy: 0.1436 - val_loss: 3.4263 - val_accuracy: 0.1638 Epoch 30/40 Epoch 00030: val_loss improved from 3.42633 to 3.34038, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 3.4307 - accuracy: 0.1597 - val_loss: 3.3404 - val_accuracy: 0.1751 Epoch 31/40 Epoch 00031: val_loss did not improve from 3.34038 409/409 - 104s - loss: 3.4102 - accuracy: 0.1571 - val_loss: 3.7928 - val_accuracy: 0.1095 Epoch 32/40 Epoch 00032: val_loss improved from 3.34038 to 3.32194, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 104s - loss: 3.3882 - accuracy: 0.1688 - val_loss: 3.3219 - val_accuracy: 0.1790 Epoch 33/40 Epoch 00033: val_loss did not improve from 3.32194 409/409 - 103s - loss: 3.3996 - accuracy: 0.1633 - val_loss: 3.3817 - val_accuracy: 0.1633 Epoch 34/40 Epoch 00034: val_loss improved from 3.32194 to 3.27653, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.3728 - accuracy: 0.1623 - val_loss: 3.2765 - val_accuracy: 0.1726 Epoch 35/40 Epoch 00035: val_loss did not improve from 3.27653 409/409 - 104s - loss: 3.3594 - accuracy: 0.1707 - val_loss: 3.6116 - val_accuracy: 0.1511 Epoch 36/40 Epoch 00036: val_loss improved from 3.27653 to 3.23442, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold1.h5 409/409 - 103s - loss: 3.3134 - accuracy: 0.1778 - val_loss: 3.2344 - val_accuracy: 0.2015 Epoch 37/40 Epoch 00037: val_loss did not improve from 3.23442 409/409 - 103s - loss: 3.3218 - accuracy: 0.1750 - val_loss: 3.4117 - val_accuracy: 0.1721 Epoch 38/40 Epoch 00038: val_loss did not improve from 3.23442 409/409 - 104s - loss: 3.2803 - accuracy: 0.1788 - val_loss: 3.5030 - val_accuracy: 0.1428 Epoch 39/40 Epoch 00039: val_loss did not improve from 3.23442 409/409 - 103s - loss: 3.2859 - accuracy: 0.1801 - val_loss: 3.3076 - val_accuracy: 0.1834 Epoch 40/40 Epoch 00040: val_loss did not improve from 3.23442 409/409 - 104s - loss: 3.2754 - accuracy: 0.1842 - val_loss: 3.3805 - val_accuracy: 0.1619 Fitting model with fold no. 3 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/40 Epoch 00001: val_loss improved from inf to 5.34751, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 106s - loss: 4.8368 - accuracy: 0.0127 - val_loss: 5.3475 - val_accuracy: 0.0098 Epoch 2/40 Epoch 00002: val_loss improved from 5.34751 to 4.79478, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 4.7010 - accuracy: 0.0196 - val_loss: 4.7948 - val_accuracy: 0.0166 Epoch 3/40 Epoch 00003: val_loss improved from 4.79478 to 4.50186, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 4.5922 - accuracy: 0.0274 - val_loss: 4.5019 - val_accuracy: 0.0416 Epoch 4/40 Epoch 00004: val_loss did not improve from 4.50186 409/409 - 104s - loss: 4.4686 - accuracy: 
0.0404 - val_loss: 4.6764 - val_accuracy: 0.0289 Epoch 5/40 Epoch 00005: val_loss did not improve from 4.50186 409/409 - 103s - loss: 4.3949 - accuracy: 0.0424 - val_loss: 4.6948 - val_accuracy: 0.0333 Epoch 6/40 Epoch 00006: val_loss improved from 4.50186 to 4.24882, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 4.3204 - accuracy: 0.0510 - val_loss: 4.2488 - val_accuracy: 0.0445 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.24882 409/409 - 104s - loss: 4.2496 - accuracy: 0.0545 - val_loss: 9.9461 - val_accuracy: 0.0269 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.24882 409/409 - 103s - loss: 4.1811 - accuracy: 0.0592 - val_loss: 4.5959 - val_accuracy: 0.0338 Epoch 9/40 Epoch 00009: val_loss did not improve from 4.24882 409/409 - 103s - loss: 4.1305 - accuracy: 0.0635 - val_loss: 4.4714 - val_accuracy: 0.0377 Epoch 10/40 Epoch 00010: val_loss did not improve from 4.24882 409/409 - 103s - loss: 4.0645 - accuracy: 0.0706 - val_loss: 4.6401 - val_accuracy: 0.0318 Epoch 11/40 Epoch 00011: val_loss improved from 4.24882 to 4.03015, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 4.0226 - accuracy: 0.0747 - val_loss: 4.0301 - val_accuracy: 0.0739 Epoch 12/40 Epoch 00012: val_loss improved from 4.03015 to 4.01705, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.9663 - accuracy: 0.0819 - val_loss: 4.0171 - val_accuracy: 0.0812 Epoch 13/40 Epoch 00013: val_loss improved from 4.01705 to 3.86215, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 104s - 
loss: 3.9118 - accuracy: 0.0873 - val_loss: 3.8621 - val_accuracy: 0.0890 Epoch 14/40 Epoch 00014: val_loss did not improve from 3.86215 409/409 - 103s - loss: 3.8778 - accuracy: 0.0896 - val_loss: 3.8946 - val_accuracy: 0.0856 Epoch 15/40 Epoch 00015: val_loss did not improve from 3.86215 409/409 - 103s - loss: 3.8284 - accuracy: 0.0943 - val_loss: 3.9874 - val_accuracy: 0.0959 Epoch 16/40 Epoch 00016: val_loss improved from 3.86215 to 3.78099, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 104s - loss: 3.7860 - accuracy: 0.1075 - val_loss: 3.7810 - val_accuracy: 0.1062 Epoch 17/40 Epoch 00017: val_loss did not improve from 3.78099 409/409 - 103s - loss: 3.7707 - accuracy: 0.0983 - val_loss: 4.1877 - val_accuracy: 0.0714 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.78099 409/409 - 103s - loss: 3.7217 - accuracy: 0.1115 - val_loss: 3.9905 - val_accuracy: 0.0890 Epoch 19/40 Epoch 00019: val_loss improved from 3.78099 to 3.55879, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.6794 - accuracy: 0.1154 - val_loss: 3.5588 - val_accuracy: 0.1404 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.55879 409/409 - 103s - loss: 3.6546 - accuracy: 0.1225 - val_loss: 3.5911 - val_accuracy: 0.1433 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.55879 409/409 - 103s - loss: 3.6205 - accuracy: 0.1235 - val_loss: 3.9530 - val_accuracy: 0.1047 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.55879 409/409 - 103s - loss: 3.5937 - accuracy: 0.1373 - val_loss: 3.6721 - val_accuracy: 0.1248 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.55879 409/409 - 103s - loss: 3.5724 - accuracy: 0.1338 - val_loss: 3.5692 - val_accuracy: 0.1429 Epoch 24/40 Epoch 00024: val_loss improved from 3.55879 to 
3.45235, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.5510 - accuracy: 0.1406 - val_loss: 3.4524 - val_accuracy: 0.1512 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.45235 409/409 - 104s - loss: 3.5306 - accuracy: 0.1425 - val_loss: 3.5965 - val_accuracy: 0.1463 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.45235 409/409 - 103s - loss: 3.4921 - accuracy: 0.1498 - val_loss: 3.6199 - val_accuracy: 0.1375 Epoch 27/40 Epoch 00027: val_loss improved from 3.45235 to 3.43603, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.4659 - accuracy: 0.1439 - val_loss: 3.4360 - val_accuracy: 0.1644 Epoch 28/40 Epoch 00028: val_loss improved from 3.43603 to 3.40352, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.4678 - accuracy: 0.1508 - val_loss: 3.4035 - val_accuracy: 0.1570 Epoch 29/40 Epoch 00029: val_loss improved from 3.40352 to 3.36190, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 103s - loss: 3.4260 - accuracy: 0.1543 - val_loss: 3.3619 - val_accuracy: 0.1673 Epoch 30/40 Epoch 00030: val_loss did not improve from 3.36190 409/409 - 103s - loss: 3.4186 - accuracy: 0.1571 - val_loss: 3.5220 - val_accuracy: 0.1566 Epoch 31/40 Epoch 00031: val_loss did not improve from 3.36190 409/409 - 104s - loss: 3.4040 - accuracy: 0.1613 - val_loss: 3.4347 - val_accuracy: 0.1747 Epoch 32/40 Epoch 00032: val_loss did not improve from 3.36190 409/409 - 103s - loss: 3.3759 - accuracy: 0.1700 - val_loss: 3.4332 - val_accuracy: 0.1614 Epoch 33/40 Epoch 00033: 
val_loss did not improve from 3.36190 409/409 - 103s - loss: 3.3601 - accuracy: 0.1678 - val_loss: 3.4646 - val_accuracy: 0.1600 Epoch 34/40 Epoch 00034: val_loss improved from 3.36190 to 3.33771, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 104s - loss: 3.3421 - accuracy: 0.1667 - val_loss: 3.3377 - val_accuracy: 0.1844 Epoch 35/40 Epoch 00035: val_loss did not improve from 3.33771 409/409 - 104s - loss: 3.3416 - accuracy: 0.1712 - val_loss: 3.4203 - val_accuracy: 0.1693 Epoch 36/40 Epoch 00036: val_loss did not improve from 3.33771 409/409 - 103s - loss: 3.3128 - accuracy: 0.1741 - val_loss: 3.4391 - val_accuracy: 0.1649 Epoch 37/40 Epoch 00037: val_loss improved from 3.33771 to 3.24907, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 110s - loss: 3.3023 - accuracy: 0.1787 - val_loss: 3.2491 - val_accuracy: 0.1986 Epoch 38/40 Epoch 00038: val_loss improved from 3.24907 to 3.17756, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold2.h5 409/409 - 123s - loss: 3.2578 - accuracy: 0.1845 - val_loss: 3.1776 - val_accuracy: 0.2040 Epoch 39/40 Epoch 00039: val_loss did not improve from 3.17756 409/409 - 117s - loss: 3.2662 - accuracy: 0.1832 - val_loss: 3.5155 - val_accuracy: 0.1575 Epoch 40/40 Epoch 00040: val_loss did not improve from 3.17756 409/409 - 119s - loss: 3.2319 - accuracy: 0.1882 - val_loss: 3.3111 - val_accuracy: 0.1830 Fitting model with fold no. 4 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. 
Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.77474, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 118s - loss: 4.8369 - accuracy: 0.0111 - val_loss: 4.7747 - val_accuracy: 0.0161 Epoch 2/40 Epoch 00002: val_loss improved from 4.77474 to 4.66889, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 118s - loss: 4.7363 - accuracy: 0.0170 - val_loss: 4.6689 - val_accuracy: 0.0259 Epoch 3/40 Epoch 00003: val_loss improved from 4.66889 to 4.63011, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 118s - loss: 4.6534 - accuracy: 0.0260 - val_loss: 4.6301 - val_accuracy: 0.0245 Epoch 4/40 Epoch 00004: val_loss did not improve from 4.63011 409/409 - 120s - loss: 4.5802 - accuracy: 0.0313 - val_loss: 4.7524 - val_accuracy: 0.0269 Epoch 5/40 Epoch 00005: val_loss improved from 4.63011 to 4.40656, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 117s - loss: 4.4824 - accuracy: 0.0356 - val_loss: 4.4066 - val_accuracy: 0.0435 Epoch 6/40 Epoch 00006: val_loss did not improve from 4.40656 409/409 - 120s - loss: 4.3925 - accuracy: 0.0410 - val_loss: 4.5574 - val_accuracy: 0.0328 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.40656 409/409 - 128s - loss: 4.3002 - accuracy: 0.0484 - val_loss: 4.5338 - val_accuracy: 0.0372 Epoch 8/40 Epoch 00008: val_loss did not improve from 4.40656 409/409 - 117s - loss: 4.2508 - accuracy: 0.0536 - val_loss: 4.9041 - val_accuracy: 0.0455 Epoch 9/40 Epoch 00009: val_loss improved from 4.40656 to 4.08579, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 117s - loss: 4.1759 - accuracy: 0.0608 - val_loss: 4.0858 - val_accuracy: 0.0734 Epoch 10/40 Epoch 00010: val_loss did not improve from 4.08579 409/409 - 116s - loss: 4.1152 - accuracy: 0.0660 - val_loss: 4.0902 - val_accuracy: 0.0680 Epoch 11/40 Epoch 00011: val_loss did not improve from 4.08579 409/409 - 116s - loss: 4.0597 - accuracy: 0.0706 - val_loss: 4.1031 - val_accuracy: 0.0700 Epoch 12/40 Epoch 00012: val_loss did not improve from 4.08579 409/409 - 116s - loss: 4.0169 - accuracy: 0.0803 - val_loss: 4.1189 - val_accuracy: 0.0675 Epoch 13/40 Epoch 00013: val_loss did not improve from 4.08579 409/409 - 116s - loss: 3.9603 - accuracy: 0.0791 - val_loss: 4.3539 - val_accuracy: 0.0660 Epoch 14/40 Epoch 00014: val_loss improved from 4.08579 to 3.89160, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 116s - loss: 3.9080 - accuracy: 0.0887 - val_loss: 3.8916 - val_accuracy: 0.0793 Epoch 15/40 Epoch 00015: val_loss did not improve from 3.89160 409/409 - 117s - loss: 3.8733 - accuracy: 0.0912 - val_loss: 4.0508 - val_accuracy: 0.0783 Epoch 16/40 Epoch 00016: val_loss improved from 3.89160 to 3.87717, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 116s - loss: 3.8153 - accuracy: 0.1000 - val_loss: 3.8772 - val_accuracy: 0.1008 Epoch 17/40 Epoch 00017: val_loss improved from 3.87717 to 3.80597, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 116s - loss: 3.7730 - accuracy: 0.1041 - val_loss: 3.8060 - val_accuracy: 0.1150 Epoch 18/40 Epoch 00018: val_loss did not improve from 3.80597 409/409 - 116s - loss: 3.7513 - 
accuracy: 0.1075 - val_loss: 3.8427 - val_accuracy: 0.0988 Epoch 19/40 Epoch 00019: val_loss did not improve from 3.80597 409/409 - 116s - loss: 3.7118 - accuracy: 0.1147 - val_loss: 4.2644 - val_accuracy: 0.0675 Epoch 20/40 Epoch 00020: val_loss did not improve from 3.80597 409/409 - 116s - loss: 3.6829 - accuracy: 0.1158 - val_loss: 3.8901 - val_accuracy: 0.1101 Epoch 21/40 Epoch 00021: val_loss improved from 3.80597 to 3.61615, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 117s - loss: 3.6423 - accuracy: 0.1211 - val_loss: 3.6162 - val_accuracy: 0.1287 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.61615 409/409 - 115s - loss: 3.6300 - accuracy: 0.1312 - val_loss: 3.8542 - val_accuracy: 0.1184 Epoch 23/40 Epoch 00023: val_loss improved from 3.61615 to 3.50526, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 104s - loss: 3.5694 - accuracy: 0.1339 - val_loss: 3.5053 - val_accuracy: 0.1507 Epoch 24/40 Epoch 00024: val_loss improved from 3.50526 to 3.45352, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 103s - loss: 3.5616 - accuracy: 0.1425 - val_loss: 3.4535 - val_accuracy: 0.1517 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.45352 409/409 - 103s - loss: 3.5242 - accuracy: 0.1467 - val_loss: 3.5293 - val_accuracy: 0.1473 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.45352 409/409 - 105s - loss: 3.5013 - accuracy: 0.1466 - val_loss: 3.4929 - val_accuracy: 0.1517 Epoch 27/40 Epoch 00027: val_loss did not improve from 3.45352 409/409 - 104s - loss: 3.4890 - accuracy: 0.1511 - val_loss: 3.4808 - val_accuracy: 0.1487 Epoch 28/40 Epoch 00028: val_loss did not improve from 3.45352 409/409 
- 104s - loss: 3.4558 - accuracy: 0.1535 - val_loss: 3.4803 - val_accuracy: 0.1619 Epoch 29/40 Epoch 00029: val_loss did not improve from 3.45352 409/409 - 104s - loss: 3.4290 - accuracy: 0.1601 - val_loss: 3.5266 - val_accuracy: 0.1575 Epoch 30/40 Epoch 00030: val_loss improved from 3.45352 to 3.33593, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 115s - loss: 3.4173 - accuracy: 0.1591 - val_loss: 3.3359 - val_accuracy: 0.1619 Epoch 31/40 Epoch 00031: val_loss did not improve from 3.33593 409/409 - 116s - loss: 3.3943 - accuracy: 0.1651 - val_loss: 3.7340 - val_accuracy: 0.1350 Epoch 32/40 Epoch 00032: val_loss did not improve from 3.33593 409/409 - 117s - loss: 3.3852 - accuracy: 0.1659 - val_loss: 3.6372 - val_accuracy: 0.1336 Epoch 33/40 Epoch 00033: val_loss did not improve from 3.33593 409/409 - 116s - loss: 3.3338 - accuracy: 0.1716 - val_loss: 3.3838 - val_accuracy: 0.1693 Epoch 34/40 Epoch 00034: val_loss improved from 3.33593 to 3.28585, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 116s - loss: 3.3343 - accuracy: 0.1662 - val_loss: 3.2859 - val_accuracy: 0.1830 Epoch 35/40 Epoch 00035: val_loss did not improve from 3.28585 409/409 - 116s - loss: 3.3026 - accuracy: 0.1790 - val_loss: 3.3918 - val_accuracy: 0.1747 Epoch 36/40 Epoch 00036: val_loss did not improve from 3.28585 409/409 - 116s - loss: 3.2996 - accuracy: 0.1804 - val_loss: 3.4644 - val_accuracy: 0.1634 Epoch 37/40 Epoch 00037: val_loss improved from 3.28585 to 3.15296, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold3.h5 409/409 - 116s - loss: 3.2744 - accuracy: 0.1865 - val_loss: 3.1530 - val_accuracy: 0.2133 Epoch 38/40 Epoch 00038: val_loss did not 
improve from 3.15296 409/409 - 116s - loss: 3.2560 - accuracy: 0.1790 - val_loss: 3.3115 - val_accuracy: 0.1952 Epoch 39/40 Epoch 00039: val_loss did not improve from 3.15296 409/409 - 116s - loss: 3.2096 - accuracy: 0.1945 - val_loss: 3.2070 - val_accuracy: 0.1937 Epoch 40/40 Epoch 00040: val_loss did not improve from 3.15296 409/409 - 116s - loss: 3.2103 - accuracy: 0.1950 - val_loss: 3.2890 - val_accuracy: 0.1972 Fitting model with fold no. 5 Found 8178 validated image filenames belonging to 120 classes. Found 2044 validated image filenames belonging to 120 classes. Epoch 1/40 Epoch 00001: val_loss improved from inf to 4.78475, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 116s - loss: 4.8348 - accuracy: 0.0105 - val_loss: 4.7847 - val_accuracy: 0.0132 Epoch 2/40 Epoch 00002: val_loss improved from 4.78475 to 4.67379, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 116s - loss: 4.7274 - accuracy: 0.0181 - val_loss: 4.6738 - val_accuracy: 0.0279 Epoch 3/40 Epoch 00003: val_loss improved from 4.67379 to 4.55188, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 111s - loss: 4.6349 - accuracy: 0.0251 - val_loss: 4.5519 - val_accuracy: 0.0308 Epoch 4/40 Epoch 00004: val_loss improved from 4.55188 to 4.51882, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 103s - loss: 4.5540 - accuracy: 0.0293 - val_loss: 4.5188 - val_accuracy: 0.0323 Epoch 5/40 Epoch 00005: val_loss improved from 4.51882 to 4.36296, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.4627 - accuracy: 0.0367 - val_loss: 4.3630 - val_accuracy: 0.0475 Epoch 6/40 Epoch 00006: val_loss improved from 4.36296 to 4.35832, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.3790 - accuracy: 0.0429 - val_loss: 4.3583 - val_accuracy: 0.0499 Epoch 7/40 Epoch 00007: val_loss did not improve from 4.35832 409/409 - 104s - loss: 4.3204 - accuracy: 0.0457 - val_loss: 5.8470 - val_accuracy: 0.0245 Epoch 8/40 Epoch 00008: val_loss improved from 4.35832 to 4.26300, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.2479 - accuracy: 0.0538 - val_loss: 4.2630 - val_accuracy: 0.0543 Epoch 9/40 Epoch 00009: val_loss improved from 4.26300 to 4.19835, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.1902 - accuracy: 0.0597 - val_loss: 4.1984 - val_accuracy: 0.0675 Epoch 10/40 Epoch 00010: val_loss improved from 4.19835 to 4.15875, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.1212 - accuracy: 0.0664 - val_loss: 4.1588 - val_accuracy: 0.0665 Epoch 11/40 Epoch 00011: val_loss improved from 4.15875 to 4.08503, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 4.0974 - accuracy: 0.0680 - val_loss: 4.0850 - val_accuracy: 0.0744 Epoch 12/40 Epoch 00012: val_loss did not improve from 4.08503 409/409 - 104s - loss: 
4.0413 - accuracy: 0.0743 - val_loss: 4.1901 - val_accuracy: 0.0621 Epoch 13/40 Epoch 00013: val_loss improved from 4.08503 to 3.94923, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 3.9939 - accuracy: 0.0747 - val_loss: 3.9492 - val_accuracy: 0.0866 Epoch 14/40 Epoch 00014: val_loss improved from 3.94923 to 3.92521, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 3.9468 - accuracy: 0.0858 - val_loss: 3.9252 - val_accuracy: 0.0881 Epoch 15/40 Epoch 00015: val_loss improved from 3.92521 to 3.84011, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 105s - loss: 3.8981 - accuracy: 0.0877 - val_loss: 3.8401 - val_accuracy: 0.1057 Epoch 16/40 Epoch 00016: val_loss did not improve from 3.84011 409/409 - 103s - loss: 3.8726 - accuracy: 0.0894 - val_loss: 4.1205 - val_accuracy: 0.0685 Epoch 17/40 Epoch 00017: val_loss did not improve from 3.84011 409/409 - 104s - loss: 3.8303 - accuracy: 0.1050 - val_loss: 3.8529 - val_accuracy: 0.0944 Epoch 18/40 Epoch 00018: val_loss improved from 3.84011 to 3.74332, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 103s - loss: 3.7919 - accuracy: 0.1047 - val_loss: 3.7433 - val_accuracy: 0.1071 Epoch 19/40 Epoch 00019: val_loss improved from 3.74332 to 3.70856, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 104s - loss: 3.7523 - accuracy: 0.1071 - val_loss: 3.7086 - val_accuracy: 0.1208 Epoch 20/40 Epoch 00020: val_loss did not 
improve from 3.70856 409/409 - 119s - loss: 3.7120 - accuracy: 0.1118 - val_loss: 3.8011 - val_accuracy: 0.1115 Epoch 21/40 Epoch 00021: val_loss did not improve from 3.70856 409/409 - 116s - loss: 3.6860 - accuracy: 0.1138 - val_loss: 3.7492 - val_accuracy: 0.1267 Epoch 22/40 Epoch 00022: val_loss did not improve from 3.70856 409/409 - 117s - loss: 3.6587 - accuracy: 0.1267 - val_loss: 3.9965 - val_accuracy: 0.0934 Epoch 23/40 Epoch 00023: val_loss did not improve from 3.70856 409/409 - 117s - loss: 3.6201 - accuracy: 0.1236 - val_loss: 3.9436 - val_accuracy: 0.0954 Epoch 24/40 Epoch 00024: val_loss improved from 3.70856 to 3.53968, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.6050 - accuracy: 0.1354 - val_loss: 3.5397 - val_accuracy: 0.1380 Epoch 25/40 Epoch 00025: val_loss did not improve from 3.53968 409/409 - 117s - loss: 3.5651 - accuracy: 0.1333 - val_loss: 3.7522 - val_accuracy: 0.1179 Epoch 26/40 Epoch 00026: val_loss did not improve from 3.53968 409/409 - 117s - loss: 3.5560 - accuracy: 0.1389 - val_loss: 3.5842 - val_accuracy: 0.1414 Epoch 27/40 Epoch 00027: val_loss improved from 3.53968 to 3.51671, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.5104 - accuracy: 0.1433 - val_loss: 3.5167 - val_accuracy: 0.1590 Epoch 28/40 Epoch 00028: val_loss improved from 3.51671 to 3.45194, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.5046 - accuracy: 0.1472 - val_loss: 3.4519 - val_accuracy: 0.1668 Epoch 29/40 Epoch 00029: val_loss improved from 3.45194 to 3.35227, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.4438 - accuracy: 0.1493 - val_loss: 3.3523 - val_accuracy: 0.1673 Epoch 30/40 Epoch 00030: val_loss did not improve from 3.35227 409/409 - 117s - loss: 3.4392 - accuracy: 0.1500 - val_loss: 3.3794 - val_accuracy: 0.1771 Epoch 31/40 Epoch 00031: val_loss improved from 3.35227 to 3.32952, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.4291 - accuracy: 0.1548 - val_loss: 3.3295 - val_accuracy: 0.1747 Epoch 32/40 Epoch 00032: val_loss improved from 3.32952 to 3.29097, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 117s - loss: 3.4117 - accuracy: 0.1625 - val_loss: 3.2910 - val_accuracy: 0.1908 Epoch 33/40 Epoch 00033: val_loss did not improve from 3.29097 409/409 - 113s - loss: 3.4120 - accuracy: 0.1553 - val_loss: 3.3876 - val_accuracy: 0.1781 Epoch 34/40 Epoch 00034: val_loss did not improve from 3.29097 409/409 - 106s - loss: 3.3596 - accuracy: 0.1681 - val_loss: 3.4008 - val_accuracy: 0.1835 Epoch 35/40 Epoch 00035: val_loss did not improve from 3.29097 409/409 - 108s - loss: 3.3647 - accuracy: 0.1708 - val_loss: 3.5292 - val_accuracy: 0.1531 Epoch 36/40 Epoch 00036: val_loss did not improve from 3.29097 409/409 - 117s - loss: 3.3271 - accuracy: 0.1713 - val_loss: 3.5005 - val_accuracy: 0.1629 Epoch 37/40 Epoch 00037: val_loss did not improve from 3.29097 409/409 - 125s - loss: 3.3133 - accuracy: 0.1747 - val_loss: 3.3306 - val_accuracy: 0.1923 Epoch 38/40 Epoch 00038: val_loss did not improve from 3.29097 409/409 - 139s - loss: 3.2932 - accuracy: 0.1789 - val_loss: 3.3392 - val_accuracy: 0.1820 Epoch 39/40 Epoch 00039: val_loss did not improve from 3.29097 409/409 - 131s - loss: 3.2807 - accuracy: 
0.1723 - val_loss: 3.4023 - val_accuracy: 0.1761 Epoch 40/40 Epoch 00040: val_loss improved from 3.29097 to 3.17918, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_fold4.h5 409/409 - 132s - loss: 3.2612 - accuracy: 0.1855 - val_loss: 3.1792 - val_accuracy: 0.2148
# Plot the per-fold training statistics for the augmented simple-CNN run.
plot_model_fold_stats(k=k, history=improved_cnn_history_aug)
(5, 2)
# Print the mean last-epoch loss/accuracy across folds (output shown below).
print_model_last_epoch_stats(history=improved_cnn_history_aug)
Mean fold accuracy: 18.96% Mean fold loss: 3.23105731010437 Mean fold validation accuracy: 19.07% Mean fold validation loss: 3.285396909713745
# Evaluate the saved per-fold checkpoints (prefix 'augment_simple_cnn_fold') on
# 224x224 inputs scaled to [0, 1]; the returned dict holds at least 'confusion',
# 'correct', 'wrong' and 'correct_perc' entries, used below.
improved_cnn_aug_metrics = calculate_model_metrics(saved_model_prefix='augment_simple_cnn_fold', dims=224,
create_model_func=lambda : create_improved_model('adam'), folds=train_weighted_folds, preprocess_func=lambda s: s / 255., dtype=np.float64)
plot_confusion_matrix(improved_cnn_aug_metrics['confusion'], labels=labels, title="Confusion Matrix Simple CNN Improved with Augmentation")
read_and_plot_correct_wrong(correct=improved_cnn_aug_metrics['correct'], wrong=improved_cnn_aug_metrics['wrong'])
print('Correct percentage: {}%'.format(round(improved_cnn_aug_metrics['correct_perc'] * 100, 2)))
Correct percentage: 1.13%
def retrain_improved_model_augmentation(epochs):
    """Resume training of the improved CNN on fold 0 with augmented data.

    Rebuilds the improved model, warm-starts it from the best fold-0
    checkpoint of the previous augmented run, recreates the fold-0
    train/validation generators, and trains for `epochs` more epochs,
    checkpointing under the 'augment_simple_cnn_retrain0' name.

    Args:
        epochs: number of additional epochs to train.

    Returns:
        The Keras History object produced by `model.fit`.
    """
    model = create_improved_model(optimizer=Adam(learning_rate=0.001))
    # Warm-start from the weights saved for fold 0 of the augmented run.
    model.load_weights(join(model_save_dir, "model_augment_simple_cnn_fold0.h5"))
    callbacks = get_callbacks(saved_model_name='augment_simple_cnn_retrain0')
    train_data_generator = aug_image_gen.flow_from_dataframe(
        dataframe=train_weighted_folds[0]['train'], target_size=large_image_size, directory=train_dir,
        x_col='id', y_col='breed', class_mode="categorical",
        shuffle=True, batch_size=BATCH_SIZE)
    valid_data_generator = valid_image_gen.flow_from_dataframe(
        dataframe=train_weighted_folds[0]['valid'], target_size=large_image_size, directory=train_dir,
        x_col='id', y_col='breed', class_mode="categorical",
        shuffle=True, batch_size=BATCH_SIZE)
    return model.fit(x=train_data_generator, validation_data=valid_data_generator,
                     callbacks=callbacks, epochs=epochs, verbose=2)
# Resume fold-0 training for 20 additional epochs (output shown below).
improved_cnn_history_aug_retrain = retrain_improved_model_augmentation(epochs=20)
Found 8177 validated image filenames belonging to 120 classes. Found 2045 validated image filenames belonging to 120 classes. Epoch 1/20 Epoch 00001: val_loss improved from inf to 3.25478, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 134s - loss: 3.2492 - accuracy: 0.1887 - val_loss: 3.2548 - val_accuracy: 0.2205 Epoch 2/20 Epoch 00002: val_loss improved from 3.25478 to 3.21507, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 123s - loss: 3.2047 - accuracy: 0.1898 - val_loss: 3.2151 - val_accuracy: 0.2225 Epoch 3/20 Epoch 00003: val_loss did not improve from 3.21507 409/409 - 126s - loss: 3.1980 - accuracy: 0.1975 - val_loss: 3.2302 - val_accuracy: 0.2054 Epoch 4/20 Epoch 00004: val_loss did not improve from 3.21507 409/409 - 123s - loss: 3.1696 - accuracy: 0.2026 - val_loss: 3.2967 - val_accuracy: 0.2029 Epoch 5/20 Epoch 00005: val_loss did not improve from 3.21507 409/409 - 125s - loss: 3.1839 - accuracy: 0.2059 - val_loss: 3.2731 - val_accuracy: 0.2191 Epoch 6/20 Epoch 00006: val_loss improved from 3.21507 to 3.21064, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 130s - loss: 3.1323 - accuracy: 0.2029 - val_loss: 3.2106 - val_accuracy: 0.2362 Epoch 7/20 Epoch 00007: val_loss improved from 3.21064 to 3.05898, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 130s - loss: 3.1222 - accuracy: 0.2073 - val_loss: 3.0590 - val_accuracy: 0.2318 Epoch 8/20 Epoch 00008: val_loss improved from 3.05898 to 3.05464, saving model to C:\My Files\School\Programming\Projects\Deep 
Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 131s - loss: 3.1184 - accuracy: 0.2059 - val_loss: 3.0546 - val_accuracy: 0.2440 Epoch 9/20 Epoch 00009: val_loss did not improve from 3.05464 409/409 - 137s - loss: 3.1131 - accuracy: 0.2075 - val_loss: 3.0846 - val_accuracy: 0.2337 Epoch 10/20 Epoch 00010: val_loss improved from 3.05464 to 3.04049, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 128s - loss: 3.0925 - accuracy: 0.2107 - val_loss: 3.0405 - val_accuracy: 0.2347 Epoch 11/20 Epoch 00011: val_loss improved from 3.04049 to 3.00154, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 132s - loss: 3.0647 - accuracy: 0.2177 - val_loss: 3.0015 - val_accuracy: 0.2518 Epoch 12/20 Epoch 00012: val_loss improved from 3.00154 to 2.94483, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 118s - loss: 3.0864 - accuracy: 0.2161 - val_loss: 2.9448 - val_accuracy: 0.2621 Epoch 13/20 Epoch 00013: val_loss did not improve from 2.94483 409/409 - 118s - loss: 3.0656 - accuracy: 0.2193 - val_loss: 3.0049 - val_accuracy: 0.2528 Epoch 14/20 Epoch 00014: val_loss did not improve from 2.94483 409/409 - 121s - loss: 3.0597 - accuracy: 0.2260 - val_loss: 3.0298 - val_accuracy: 0.2572 Epoch 15/20 Epoch 00015: val_loss did not improve from 2.94483 409/409 - 125s - loss: 3.0242 - accuracy: 0.2247 - val_loss: 2.9827 - val_accuracy: 0.2631 Epoch 16/20 Epoch 00016: val_loss improved from 2.94483 to 2.85092, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_augment_simple_cnn_retrain0.h5 409/409 - 137s 
- loss: 3.0136 - accuracy: 0.2275 - val_loss: 2.8509 - val_accuracy: 0.2929 Epoch 17/20 Epoch 00017: val_loss did not improve from 2.85092 409/409 - 126s - loss: 2.9998 - accuracy: 0.2271 - val_loss: 3.0830 - val_accuracy: 0.2465 Epoch 18/20 Epoch 00018: val_loss did not improve from 2.85092 409/409 - 122s - loss: 2.9974 - accuracy: 0.2322 - val_loss: 3.0611 - val_accuracy: 0.2587 Epoch 19/20 Epoch 00019: val_loss did not improve from 2.85092 409/409 - 124s - loss: 2.9830 - accuracy: 0.2299 - val_loss: 2.9721 - val_accuracy: 0.2611 Epoch 20/20 Epoch 00020: val_loss did not improve from 2.85092 409/409 - 132s - loss: 3.0012 - accuracy: 0.2349 - val_loss: 2.8839 - val_accuracy: 0.2807
def plot_model_stats(h):
    """Plot train/validation accuracy and loss curves from a Keras History."""
    fig, axes = plt.subplots(2, 1, figsize=(10, 10))
    # (train metric key, validation metric key, panel title, y-axis label)
    panels = (
        ('accuracy', 'val_accuracy', 'Model accuracy', 'Accuracy'),
        ('loss', 'val_loss', 'Model loss', 'Loss'),
    )
    for axis, (train_key, valid_key, title, y_label) in zip(axes, panels):
        axis.plot(h.history[train_key])
        axis.plot(h.history[valid_key])
        axis.set_title(title)
        axis.set_ylabel(y_label)
        axis.set_xlabel('Epoch')
        axis.legend(['Train', 'Valid'], loc='upper left')
    fig.tight_layout(pad=3.0)
    plt.show()
# Plot the retrain run's accuracy/loss curves.
plot_model_stats(improved_cnn_history_aug_retrain)
# Evaluate only the retrained fold-0 checkpoint (fold_i=0) on 224x224 inputs
# scaled to [0, 1]; returns the same metrics dict as the earlier evaluation.
improved_cnn_aug_metrics_retrain = calculate_model_metrics(saved_model_prefix='augment_simple_cnn_retrain', dims=224,
create_model_func=lambda : create_improved_model('adam'), folds=train_weighted_folds, preprocess_func=lambda s: s / 255., dtype=np.float64, fold_i=0)
# Fixed typo in the plot title: "Ttoal" -> "Total".
plot_confusion_matrix(improved_cnn_aug_metrics_retrain['confusion'], labels=labels, title="Confusion Matrix Simple CNN Improved with Augmentation - Second run (Total 80 epochs)")
read_and_plot_correct_wrong(correct=improved_cnn_aug_metrics_retrain['correct'], wrong=improved_cnn_aug_metrics_retrain['wrong'])
From the displayed samples we can see a small improvement in classification quality.
The percentage of correct predictions went up, and several of the remaining wrong predictions involve breeds that are genuinely tricky to separate.
# Report the retrained model's correct-prediction percentage (output below).
print('Correct percentage: {}%'.format(round(improved_cnn_aug_metrics_retrain['correct_perc'] * 100, 2)))
Correct percentage: 1.03%
# Load the test images at 224x224, scaled to [0, 1].
X_test, test_ids = get_test_samples(preprocess_func=lambda s: s / 255., dims=224, dtype=np.float64)
# Build the Kaggle submission CSV from the per-fold saved models
# (checkpoint prefix 'model_augment_simple_cnn_fold').
submission_data = create_submission_file(X_test=X_test, test_ids=test_ids, name='improved_augmentation_submission.csv', dims=224,
saved_model_prefix='model_augment_simple_cnn_fold', create_model_func= lambda : create_improved_model('adam'))
submission_data
Due to disabling neurons, some of the information about each sample is lost, and the subsequent layers attempt to construct the answers based on incomplete representations.
The training loss is higher because we've probably made it artificially harder for the network to give the right answers. However, during validation all of the neurons are available, so the network has its full computational power - and thus it might perform better than in training.
Inception v3 is a widely-used image recognition model that has been shown to attain greater than 78.1% accuracy on the ImageNet dataset.
The model is the culmination of many ideas developed by multiple researchers over the years. It is based on the original paper: "Rethinking the Inception Architecture for Computer Vision" by Szegedy et al.
A high-level diagram of the model is shown below:
# Show the high-level InceptionV3 architecture diagram.
display_image("inceptionv3.jpg")
Inception v3 TPU training runs match accuracy curves produced by GPU jobs of similar configuration. The model has been successfully trained on v2-8, v2-128, and v2-512 configurations. The model has attained greater than 78.1% accuracy in about 170 epochs on each of these. The model itself is made up of symmetric and asymmetric building blocks, including convolutions, average pooling, max pooling, concats, dropouts, and fully connected layers. Batchnorm is used extensively throughout the model and applied to activation inputs. Loss is computed via Softmax.
Inception basic block:
display_image("inceptionv3_block.jpg")
Comparison of Inception V3 against other known models:
# Show the chart comparing InceptionV3 against other well-known models.
display_image("comparison.png")
We can see that InceptionV3 is in the upper spectrum when it comes to accuracy and efficiency.
You can learn more about InceptionV3 in the following article: Inception V3 Paper
from keras.applications.inception_v3 import InceptionV3
# Load InceptionV3 pretrained on ImageNet, without its classification head
# (include_top=False), configured for 224x224 RGB inputs.
inception_v3 = InceptionV3(include_top=False, weights='imagenet', input_shape=(224,224,3))
inception_v3.summary()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_27 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 111, 111, 32) 864 input_27[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 111, 111, 32) 96 conv2d_130[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 111, 111, 32) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 109, 109, 32) 9216 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 109, 109, 32) 96 conv2d_131[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 109, 109, 32) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 109, 109, 64) 18432 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 109, 109, 64) 192 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 109, 109, 64) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
max_pooling2d_126 (MaxPooling2D (None, 54, 54, 64) 0 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_126[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 54, 54, 80) 240 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 54, 54, 80) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 52, 52, 192) 138240 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 52, 52, 192) 576 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 52, 52, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
max_pooling2d_127 (MaxPooling2D (None, 25, 25, 192) 0 activation_4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 25, 25, 64) 192 conv2d_138[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 25, 25, 64) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 25, 25, 96) 55296 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 25, 25, 48) 144 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 25, 25, 96) 288 conv2d_139[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 25, 25, 48) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 25, 25, 96) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 25, 25, 192) 0 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 25, 25, 64) 76800 activation_6[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 25, 25, 96) 82944 activation_9[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 25, 25, 32) 6144 average_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 25, 25, 64) 192 conv2d_135[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 25, 25, 64) 192 conv2d_137[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 25, 25, 96) 288 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 25, 25, 32) 96 conv2d_141[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 25, 25, 64) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 25, 25, 64) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 25, 25, 96) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 25, 25, 32) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 25, 25, 256) 0 activation_5[0][0]
activation_7[0][0]
activation_10[0][0]
activation_11[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 25, 25, 64) 192 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 25, 25, 64) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 25, 25, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 25, 25, 96) 55296 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 25, 25, 48) 144 conv2d_143[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 25, 25, 96) 288 conv2d_146[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 25, 25, 48) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 25, 25, 96) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 25, 25, 64) 76800 activation_13[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 25, 25, 96) 82944 activation_16[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 25, 25, 64) 16384 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 25, 25, 64) 192 conv2d_142[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 25, 25, 64) 192 conv2d_144[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 25, 25, 96) 288 conv2d_147[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 25, 25, 64) 192 conv2d_148[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 25, 25, 64) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 25, 25, 64) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 25, 25, 96) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 25, 25, 64) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 25, 25, 288) 0 activation_12[0][0]
activation_14[0][0]
activation_17[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 25, 25, 64) 192 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 25, 25, 64) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 25, 25, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 25, 25, 96) 55296 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 25, 25, 48) 144 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 25, 25, 96) 288 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 25, 25, 48) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 25, 25, 96) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 25, 25, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 25, 25, 64) 76800 activation_20[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 25, 25, 96) 82944 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 25, 25, 64) 18432 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 25, 25, 64) 192 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 25, 25, 64) 192 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 25, 25, 96) 288 conv2d_154[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 25, 25, 64) 192 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 25, 25, 64) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 25, 25, 64) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 25, 25, 96) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 25, 25, 64) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 25, 25, 288) 0 activation_19[0][0]
activation_21[0][0]
activation_24[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 25, 25, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 25, 25, 64) 192 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 25, 25, 64) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 25, 25, 96) 55296 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 25, 25, 96) 288 conv2d_158[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 25, 25, 96) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 12, 12, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 12, 12, 96) 82944 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 12, 12, 384) 1152 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 12, 12, 96) 288 conv2d_159[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 12, 12, 384) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 12, 12, 96) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
max_pooling2d_128 (MaxPooling2D (None, 12, 12, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 12, 12, 768) 0 activation_26[0][0]
activation_29[0][0]
max_pooling2d_128[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 12, 12, 128) 384 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 12, 12, 128) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 12, 12, 128) 114688 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 12, 12, 128) 384 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 12, 12, 128) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 12, 12, 128) 114688 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 12, 12, 128) 384 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 12, 12, 128) 384 conv2d_166[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 12, 12, 128) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 12, 12, 128) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 12, 12, 128) 114688 activation_31[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 12, 12, 128) 114688 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 12, 12, 128) 384 conv2d_162[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 12, 12, 128) 384 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 12, 12, 128) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 12, 12, 128) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 12, 12, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 12, 12, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 12, 12, 192) 172032 activation_32[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 12, 12, 192) 172032 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 12, 12, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 12, 12, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 12, 12, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 12, 12, 192) 576 conv2d_169[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 12, 12, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 12, 12, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 12, 12, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 12, 12, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 12, 12, 768) 0 activation_30[0][0]
activation_33[0][0]
activation_38[0][0]
activation_39[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 12, 12, 160) 480 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 12, 12, 160) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 12, 12, 160) 179200 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 12, 12, 160) 480 conv2d_175[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 12, 12, 160) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 12, 12, 160) 179200 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 12, 12, 160) 480 conv2d_171[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 12, 12, 160) 480 conv2d_176[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 12, 12, 160) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 12, 12, 160) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 12, 12, 160) 179200 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 12, 12, 160) 179200 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 12, 12, 160) 480 conv2d_172[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 12, 12, 160) 480 conv2d_177[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 12, 12, 160) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 12, 12, 160) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 12, 12, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 12, 12, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 12, 12, 192) 215040 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 12, 12, 192) 215040 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 12, 12, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 12, 12, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 12, 12, 192) 576 conv2d_178[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 12, 12, 192) 576 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 12, 12, 192) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 12, 12, 192) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 12, 12, 192) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 12, 12, 192) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 12, 12, 768) 0 activation_40[0][0]
activation_43[0][0]
activation_48[0][0]
activation_49[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 12, 12, 160) 480 conv2d_184[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, 12, 12, 160) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 12, 12, 160) 179200 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 12, 12, 160) 480 conv2d_185[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, 12, 12, 160) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 12, 12, 160) 179200 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 12, 12, 160) 480 conv2d_181[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 12, 12, 160) 480 conv2d_186[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 12, 12, 160) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, 12, 12, 160) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 12, 12, 160) 179200 activation_51[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 12, 12, 160) 179200 activation_56[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 12, 12, 160) 480 conv2d_182[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 12, 12, 160) 480 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 12, 12, 160) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, 12, 12, 160) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 12, 12, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 12, 12, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 12, 12, 192) 215040 activation_52[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 12, 12, 192) 215040 activation_57[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 12, 12, 192) 576 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 12, 12, 192) 576 conv2d_183[0][0]
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 12, 12, 192) 576 conv2d_188[0][0]
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 12, 12, 192) 576 conv2d_189[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 12, 12, 192) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, 12, 12, 192) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, 12, 12, 192) 0 batch_normalization_188[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, 12, 12, 192) 0 batch_normalization_189[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 12, 12, 768) 0 activation_50[0][0]
activation_53[0][0]
activation_58[0][0]
activation_59[0][0]
__________________________________________________________________________________________________
conv2d_194 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 12, 12, 192) 576 conv2d_194[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, 12, 12, 192) 0 batch_normalization_194[0][0]
__________________________________________________________________________________________________
conv2d_195 (Conv2D) (None, 12, 12, 192) 258048 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 12, 12, 192) 576 conv2d_195[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, 12, 12, 192) 0 batch_normalization_195[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_196 (Conv2D) (None, 12, 12, 192) 258048 activation_65[0][0]
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 12, 12, 192) 576 conv2d_191[0][0]
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 12, 12, 192) 576 conv2d_196[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, 12, 12, 192) 0 batch_normalization_191[0][0]
__________________________________________________________________________________________________
activation_66 (Activation) (None, 12, 12, 192) 0 batch_normalization_196[0][0]
__________________________________________________________________________________________________
conv2d_192 (Conv2D) (None, 12, 12, 192) 258048 activation_61[0][0]
__________________________________________________________________________________________________
conv2d_197 (Conv2D) (None, 12, 12, 192) 258048 activation_66[0][0]
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 12, 12, 192) 576 conv2d_192[0][0]
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 12, 12, 192) 576 conv2d_197[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, 12, 12, 192) 0 batch_normalization_192[0][0]
__________________________________________________________________________________________________
activation_67 (Activation) (None, 12, 12, 192) 0 batch_normalization_197[0][0]
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 12, 12, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_193 (Conv2D) (None, 12, 12, 192) 258048 activation_62[0][0]
__________________________________________________________________________________________________
conv2d_198 (Conv2D) (None, 12, 12, 192) 258048 activation_67[0][0]
__________________________________________________________________________________________________
conv2d_199 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 12, 12, 192) 576 conv2d_190[0][0]
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 12, 12, 192) 576 conv2d_193[0][0]
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 12, 12, 192) 576 conv2d_198[0][0]
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 12, 12, 192) 576 conv2d_199[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, 12, 12, 192) 0 batch_normalization_190[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, 12, 12, 192) 0 batch_normalization_193[0][0]
__________________________________________________________________________________________________
activation_68 (Activation) (None, 12, 12, 192) 0 batch_normalization_198[0][0]
__________________________________________________________________________________________________
activation_69 (Activation) (None, 12, 12, 192) 0 batch_normalization_199[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 12, 12, 768) 0 activation_60[0][0]
activation_63[0][0]
activation_68[0][0]
activation_69[0][0]
__________________________________________________________________________________________________
conv2d_202 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 12, 12, 192) 576 conv2d_202[0][0]
__________________________________________________________________________________________________
activation_72 (Activation) (None, 12, 12, 192) 0 batch_normalization_202[0][0]
__________________________________________________________________________________________________
conv2d_203 (Conv2D) (None, 12, 12, 192) 258048 activation_72[0][0]
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 12, 12, 192) 576 conv2d_203[0][0]
__________________________________________________________________________________________________
activation_73 (Activation) (None, 12, 12, 192) 0 batch_normalization_203[0][0]
__________________________________________________________________________________________________
conv2d_200 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_204 (Conv2D) (None, 12, 12, 192) 258048 activation_73[0][0]
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 12, 12, 192) 576 conv2d_200[0][0]
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 12, 12, 192) 576 conv2d_204[0][0]
__________________________________________________________________________________________________
activation_70 (Activation) (None, 12, 12, 192) 0 batch_normalization_200[0][0]
__________________________________________________________________________________________________
activation_74 (Activation) (None, 12, 12, 192) 0 batch_normalization_204[0][0]
__________________________________________________________________________________________________
conv2d_201 (Conv2D) (None, 5, 5, 320) 552960 activation_70[0][0]
__________________________________________________________________________________________________
conv2d_205 (Conv2D) (None, 5, 5, 192) 331776 activation_74[0][0]
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 5, 5, 320) 960 conv2d_201[0][0]
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 5, 5, 192) 576 conv2d_205[0][0]
__________________________________________________________________________________________________
activation_71 (Activation) (None, 5, 5, 320) 0 batch_normalization_201[0][0]
__________________________________________________________________________________________________
activation_75 (Activation) (None, 5, 5, 192) 0 batch_normalization_205[0][0]
__________________________________________________________________________________________________
max_pooling2d_129 (MaxPooling2D (None, 5, 5, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 5, 5, 1280) 0 activation_71[0][0]
activation_75[0][0]
max_pooling2d_129[0][0]
__________________________________________________________________________________________________
conv2d_210 (Conv2D) (None, 5, 5, 448) 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 5, 5, 448) 1344 conv2d_210[0][0]
__________________________________________________________________________________________________
activation_80 (Activation) (None, 5, 5, 448) 0 batch_normalization_210[0][0]
__________________________________________________________________________________________________
conv2d_207 (Conv2D) (None, 5, 5, 384) 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_211 (Conv2D) (None, 5, 5, 384) 1548288 activation_80[0][0]
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 5, 5, 384) 1152 conv2d_207[0][0]
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 5, 5, 384) 1152 conv2d_211[0][0]
__________________________________________________________________________________________________
activation_77 (Activation) (None, 5, 5, 384) 0 batch_normalization_207[0][0]
__________________________________________________________________________________________________
activation_81 (Activation) (None, 5, 5, 384) 0 batch_normalization_211[0][0]
__________________________________________________________________________________________________
conv2d_208 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_209 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_212 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
conv2d_213 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 5, 5, 1280) 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_206 (Conv2D) (None, 5, 5, 320) 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 5, 5, 384) 1152 conv2d_208[0][0]
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 5, 5, 384) 1152 conv2d_209[0][0]
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 5, 5, 384) 1152 conv2d_212[0][0]
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 5, 5, 384) 1152 conv2d_213[0][0]
__________________________________________________________________________________________________
conv2d_214 (Conv2D) (None, 5, 5, 192) 245760 average_pooling2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 5, 5, 320) 960 conv2d_206[0][0]
__________________________________________________________________________________________________
activation_78 (Activation) (None, 5, 5, 384) 0 batch_normalization_208[0][0]
__________________________________________________________________________________________________
activation_79 (Activation) (None, 5, 5, 384) 0 batch_normalization_209[0][0]
__________________________________________________________________________________________________
activation_82 (Activation) (None, 5, 5, 384) 0 batch_normalization_212[0][0]
__________________________________________________________________________________________________
activation_83 (Activation) (None, 5, 5, 384) 0 batch_normalization_213[0][0]
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 5, 5, 192) 576 conv2d_214[0][0]
__________________________________________________________________________________________________
activation_76 (Activation) (None, 5, 5, 320) 0 batch_normalization_206[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 5, 5, 768) 0 activation_78[0][0]
activation_79[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 5, 5, 768) 0 activation_82[0][0]
activation_83[0][0]
__________________________________________________________________________________________________
activation_84 (Activation) (None, 5, 5, 192) 0 batch_normalization_214[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, 5, 5, 2048) 0 activation_76[0][0]
mixed9_0[0][0]
concatenate[0][0]
activation_84[0][0]
__________________________________________________________________________________________________
conv2d_219 (Conv2D) (None, 5, 5, 448) 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 5, 5, 448) 1344 conv2d_219[0][0]
__________________________________________________________________________________________________
activation_89 (Activation) (None, 5, 5, 448) 0 batch_normalization_219[0][0]
__________________________________________________________________________________________________
conv2d_216 (Conv2D) (None, 5, 5, 384) 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_220 (Conv2D) (None, 5, 5, 384) 1548288 activation_89[0][0]
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 5, 5, 384) 1152 conv2d_216[0][0]
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 5, 5, 384) 1152 conv2d_220[0][0]
__________________________________________________________________________________________________
activation_86 (Activation) (None, 5, 5, 384) 0 batch_normalization_216[0][0]
__________________________________________________________________________________________________
activation_90 (Activation) (None, 5, 5, 384) 0 batch_normalization_220[0][0]
__________________________________________________________________________________________________
conv2d_217 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_218 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_221 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
conv2d_222 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 5, 5, 2048) 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_215 (Conv2D) (None, 5, 5, 320) 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 5, 5, 384) 1152 conv2d_217[0][0]
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 5, 5, 384) 1152 conv2d_218[0][0]
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 5, 5, 384) 1152 conv2d_221[0][0]
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 5, 5, 384) 1152 conv2d_222[0][0]
__________________________________________________________________________________________________
conv2d_223 (Conv2D) (None, 5, 5, 192) 393216 average_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 5, 5, 320) 960 conv2d_215[0][0]
__________________________________________________________________________________________________
activation_87 (Activation) (None, 5, 5, 384) 0 batch_normalization_217[0][0]
__________________________________________________________________________________________________
activation_88 (Activation) (None, 5, 5, 384) 0 batch_normalization_218[0][0]
__________________________________________________________________________________________________
activation_91 (Activation) (None, 5, 5, 384) 0 batch_normalization_221[0][0]
__________________________________________________________________________________________________
activation_92 (Activation) (None, 5, 5, 384) 0 batch_normalization_222[0][0]
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 5, 5, 192) 576 conv2d_223[0][0]
__________________________________________________________________________________________________
activation_85 (Activation) (None, 5, 5, 320) 0 batch_normalization_215[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 5, 5, 768) 0 activation_87[0][0]
activation_88[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 5, 5, 768) 0 activation_91[0][0]
activation_92[0][0]
__________________________________________________________________________________________________
activation_93 (Activation) (None, 5, 5, 192) 0 batch_normalization_223[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, 5, 5, 2048) 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
# Freeze the InceptionV3 backbone so only the custom head trains.
# Report weight counts before and after so the effect is visible.
print(f'Inception V3 weights {len(inception_v3.weights)}')
print(f'Inception V3 trainable weights {len(inception_v3.trainable_weights)}')
print("Setting Inception V3 layers untrainable")
for backbone_layer in inception_v3.layers:
    backbone_layer.trainable = False
# After freezing, trainable_weights should report 0 for the backbone.
print(f'Inception V3 edited trainable weights {len(inception_v3.trainable_weights)}')
Inception V3 weights 376 Inception V3 trainable weights 188 Setting Inception V3 layers untrainable Inception V3 edited trainable weights 0
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Input, MaxPool2D, Dropout, BatchNormalization, GlobalAveragePooling2D

# Classification head stacked on the (frozen) InceptionV3 feature extractor:
# global average pooling, two regularized dense blocks, then a 120-way
# softmax — one output unit per dog breed in the dataset.
_head_layers = [
    GlobalAveragePooling2D(),
    Dense(512, activation='relu'),
    BatchNormalization(),
    Dropout(0.5),
    Dense(256, activation='relu'),
    BatchNormalization(),
    Dense(120, activation='softmax', name='predictions'),
]

# Thread the backbone's output tensor through each head layer in order.
x = inception_v3.output
for _layer in _head_layers:
    x = _layer(x)

# Full end-to-end model: raw image in, breed probabilities out.
model = Model(inputs=inception_v3.input, outputs=x)
model.summary()
Model: "functional_53"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_27 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 111, 111, 32) 864 input_27[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 111, 111, 32) 96 conv2d_130[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 111, 111, 32) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 109, 109, 32) 9216 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 109, 109, 32) 96 conv2d_131[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 109, 109, 32) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 109, 109, 64) 18432 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 109, 109, 64) 192 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 109, 109, 64) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
max_pooling2d_126 (MaxPooling2D (None, 54, 54, 64) 0 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_126[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 54, 54, 80) 240 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 54, 54, 80) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 52, 52, 192) 138240 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 52, 52, 192) 576 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 52, 52, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
max_pooling2d_127 (MaxPooling2D (None, 25, 25, 192) 0 activation_4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 25, 25, 64) 192 conv2d_138[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 25, 25, 64) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 25, 25, 96) 55296 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 25, 25, 48) 144 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 25, 25, 96) 288 conv2d_139[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 25, 25, 48) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 25, 25, 96) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 25, 25, 192) 0 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 25, 25, 64) 76800 activation_6[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 25, 25, 96) 82944 activation_9[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 25, 25, 32) 6144 average_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 25, 25, 64) 192 conv2d_135[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 25, 25, 64) 192 conv2d_137[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 25, 25, 96) 288 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 25, 25, 32) 96 conv2d_141[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 25, 25, 64) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 25, 25, 64) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 25, 25, 96) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 25, 25, 32) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 25, 25, 256) 0 activation_5[0][0]
activation_7[0][0]
activation_10[0][0]
activation_11[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 25, 25, 64) 192 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 25, 25, 64) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 25, 25, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 25, 25, 96) 55296 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 25, 25, 48) 144 conv2d_143[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 25, 25, 96) 288 conv2d_146[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 25, 25, 48) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 25, 25, 96) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 25, 25, 64) 76800 activation_13[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 25, 25, 96) 82944 activation_16[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 25, 25, 64) 16384 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 25, 25, 64) 192 conv2d_142[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 25, 25, 64) 192 conv2d_144[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 25, 25, 96) 288 conv2d_147[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 25, 25, 64) 192 conv2d_148[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 25, 25, 64) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 25, 25, 64) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 25, 25, 96) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 25, 25, 64) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 25, 25, 288) 0 activation_12[0][0]
activation_14[0][0]
activation_17[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 25, 25, 64) 192 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 25, 25, 64) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 25, 25, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 25, 25, 96) 55296 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 25, 25, 48) 144 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 25, 25, 96) 288 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 25, 25, 48) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 25, 25, 96) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 25, 25, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 25, 25, 64) 76800 activation_20[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 25, 25, 96) 82944 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 25, 25, 64) 18432 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 25, 25, 64) 192 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 25, 25, 64) 192 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 25, 25, 96) 288 conv2d_154[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 25, 25, 64) 192 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 25, 25, 64) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 25, 25, 64) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 25, 25, 96) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 25, 25, 64) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 25, 25, 288) 0 activation_19[0][0]
activation_21[0][0]
activation_24[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 25, 25, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 25, 25, 64) 192 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 25, 25, 64) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 25, 25, 96) 55296 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 25, 25, 96) 288 conv2d_158[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 25, 25, 96) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 12, 12, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 12, 12, 96) 82944 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 12, 12, 384) 1152 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 12, 12, 96) 288 conv2d_159[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 12, 12, 384) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 12, 12, 96) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
max_pooling2d_128 (MaxPooling2D (None, 12, 12, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 12, 12, 768) 0 activation_26[0][0]
activation_29[0][0]
max_pooling2d_128[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 12, 12, 128) 384 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 12, 12, 128) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 12, 12, 128) 114688 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 12, 12, 128) 384 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 12, 12, 128) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 12, 12, 128) 114688 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 12, 12, 128) 384 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 12, 12, 128) 384 conv2d_166[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 12, 12, 128) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 12, 12, 128) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 12, 12, 128) 114688 activation_31[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 12, 12, 128) 114688 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 12, 12, 128) 384 conv2d_162[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 12, 12, 128) 384 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 12, 12, 128) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 12, 12, 128) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 12, 12, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 12, 12, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 12, 12, 192) 172032 activation_32[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 12, 12, 192) 172032 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 12, 12, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 12, 12, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 12, 12, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 12, 12, 192) 576 conv2d_169[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 12, 12, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 12, 12, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 12, 12, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 12, 12, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 12, 12, 768) 0 activation_30[0][0]
activation_33[0][0]
activation_38[0][0]
activation_39[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 12, 12, 160) 480 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 12, 12, 160) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 12, 12, 160) 179200 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 12, 12, 160) 480 conv2d_175[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 12, 12, 160) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 12, 12, 160) 179200 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 12, 12, 160) 480 conv2d_171[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 12, 12, 160) 480 conv2d_176[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 12, 12, 160) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 12, 12, 160) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 12, 12, 160) 179200 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 12, 12, 160) 179200 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 12, 12, 160) 480 conv2d_172[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 12, 12, 160) 480 conv2d_177[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 12, 12, 160) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 12, 12, 160) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 12, 12, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 12, 12, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 12, 12, 192) 215040 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 12, 12, 192) 215040 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 12, 12, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 12, 12, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 12, 12, 192) 576 conv2d_178[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 12, 12, 192) 576 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 12, 12, 192) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 12, 12, 192) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 12, 12, 192) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 12, 12, 192) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 12, 12, 768) 0 activation_40[0][0]
activation_43[0][0]
activation_48[0][0]
activation_49[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 12, 12, 160) 480 conv2d_184[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, 12, 12, 160) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 12, 12, 160) 179200 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 12, 12, 160) 480 conv2d_185[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, 12, 12, 160) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 12, 12, 160) 179200 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 12, 12, 160) 480 conv2d_181[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 12, 12, 160) 480 conv2d_186[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 12, 12, 160) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, 12, 12, 160) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 12, 12, 160) 179200 activation_51[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 12, 12, 160) 179200 activation_56[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 12, 12, 160) 480 conv2d_182[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 12, 12, 160) 480 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 12, 12, 160) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, 12, 12, 160) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 12, 12, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 12, 12, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 12, 12, 192) 215040 activation_52[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 12, 12, 192) 215040 activation_57[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 12, 12, 192) 576 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 12, 12, 192) 576 conv2d_183[0][0]
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 12, 12, 192) 576 conv2d_188[0][0]
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 12, 12, 192) 576 conv2d_189[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 12, 12, 192) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, 12, 12, 192) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, 12, 12, 192) 0 batch_normalization_188[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, 12, 12, 192) 0 batch_normalization_189[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 12, 12, 768) 0 activation_50[0][0]
activation_53[0][0]
activation_58[0][0]
activation_59[0][0]
__________________________________________________________________________________________________
conv2d_194 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 12, 12, 192) 576 conv2d_194[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, 12, 12, 192) 0 batch_normalization_194[0][0]
__________________________________________________________________________________________________
conv2d_195 (Conv2D) (None, 12, 12, 192) 258048 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 12, 12, 192) 576 conv2d_195[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, 12, 12, 192) 0 batch_normalization_195[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_196 (Conv2D) (None, 12, 12, 192) 258048 activation_65[0][0]
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 12, 12, 192) 576 conv2d_191[0][0]
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 12, 12, 192) 576 conv2d_196[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, 12, 12, 192) 0 batch_normalization_191[0][0]
__________________________________________________________________________________________________
activation_66 (Activation) (None, 12, 12, 192) 0 batch_normalization_196[0][0]
__________________________________________________________________________________________________
conv2d_192 (Conv2D) (None, 12, 12, 192) 258048 activation_61[0][0]
__________________________________________________________________________________________________
conv2d_197 (Conv2D) (None, 12, 12, 192) 258048 activation_66[0][0]
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 12, 12, 192) 576 conv2d_192[0][0]
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 12, 12, 192) 576 conv2d_197[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, 12, 12, 192) 0 batch_normalization_192[0][0]
__________________________________________________________________________________________________
activation_67 (Activation) (None, 12, 12, 192) 0 batch_normalization_197[0][0]
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 12, 12, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_193 (Conv2D) (None, 12, 12, 192) 258048 activation_62[0][0]
__________________________________________________________________________________________________
conv2d_198 (Conv2D) (None, 12, 12, 192) 258048 activation_67[0][0]
__________________________________________________________________________________________________
conv2d_199 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 12, 12, 192) 576 conv2d_190[0][0]
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 12, 12, 192) 576 conv2d_193[0][0]
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 12, 12, 192) 576 conv2d_198[0][0]
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 12, 12, 192) 576 conv2d_199[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, 12, 12, 192) 0 batch_normalization_190[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, 12, 12, 192) 0 batch_normalization_193[0][0]
__________________________________________________________________________________________________
activation_68 (Activation) (None, 12, 12, 192) 0 batch_normalization_198[0][0]
__________________________________________________________________________________________________
activation_69 (Activation) (None, 12, 12, 192) 0 batch_normalization_199[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 12, 12, 768) 0 activation_60[0][0]
activation_63[0][0]
activation_68[0][0]
activation_69[0][0]
__________________________________________________________________________________________________
conv2d_202 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 12, 12, 192) 576 conv2d_202[0][0]
__________________________________________________________________________________________________
activation_72 (Activation) (None, 12, 12, 192) 0 batch_normalization_202[0][0]
__________________________________________________________________________________________________
conv2d_203 (Conv2D) (None, 12, 12, 192) 258048 activation_72[0][0]
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 12, 12, 192) 576 conv2d_203[0][0]
__________________________________________________________________________________________________
activation_73 (Activation) (None, 12, 12, 192) 0 batch_normalization_203[0][0]
__________________________________________________________________________________________________
conv2d_200 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_204 (Conv2D) (None, 12, 12, 192) 258048 activation_73[0][0]
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 12, 12, 192) 576 conv2d_200[0][0]
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 12, 12, 192) 576 conv2d_204[0][0]
__________________________________________________________________________________________________
activation_70 (Activation) (None, 12, 12, 192) 0 batch_normalization_200[0][0]
__________________________________________________________________________________________________
activation_74 (Activation) (None, 12, 12, 192) 0 batch_normalization_204[0][0]
__________________________________________________________________________________________________
conv2d_201 (Conv2D) (None, 5, 5, 320) 552960 activation_70[0][0]
__________________________________________________________________________________________________
conv2d_205 (Conv2D) (None, 5, 5, 192) 331776 activation_74[0][0]
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 5, 5, 320) 960 conv2d_201[0][0]
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 5, 5, 192) 576 conv2d_205[0][0]
__________________________________________________________________________________________________
activation_71 (Activation) (None, 5, 5, 320) 0 batch_normalization_201[0][0]
__________________________________________________________________________________________________
activation_75 (Activation) (None, 5, 5, 192) 0 batch_normalization_205[0][0]
__________________________________________________________________________________________________
max_pooling2d_129 (MaxPooling2D (None, 5, 5, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 5, 5, 1280) 0 activation_71[0][0]
activation_75[0][0]
max_pooling2d_129[0][0]
__________________________________________________________________________________________________
conv2d_210 (Conv2D) (None, 5, 5, 448) 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 5, 5, 448) 1344 conv2d_210[0][0]
__________________________________________________________________________________________________
activation_80 (Activation) (None, 5, 5, 448) 0 batch_normalization_210[0][0]
__________________________________________________________________________________________________
conv2d_207 (Conv2D) (None, 5, 5, 384) 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_211 (Conv2D) (None, 5, 5, 384) 1548288 activation_80[0][0]
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 5, 5, 384) 1152 conv2d_207[0][0]
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 5, 5, 384) 1152 conv2d_211[0][0]
__________________________________________________________________________________________________
activation_77 (Activation) (None, 5, 5, 384) 0 batch_normalization_207[0][0]
__________________________________________________________________________________________________
activation_81 (Activation) (None, 5, 5, 384) 0 batch_normalization_211[0][0]
__________________________________________________________________________________________________
conv2d_208 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_209 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_212 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
conv2d_213 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 5, 5, 1280) 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_206 (Conv2D) (None, 5, 5, 320) 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 5, 5, 384) 1152 conv2d_208[0][0]
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 5, 5, 384) 1152 conv2d_209[0][0]
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 5, 5, 384) 1152 conv2d_212[0][0]
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 5, 5, 384) 1152 conv2d_213[0][0]
__________________________________________________________________________________________________
conv2d_214 (Conv2D) (None, 5, 5, 192) 245760 average_pooling2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 5, 5, 320) 960 conv2d_206[0][0]
__________________________________________________________________________________________________
activation_78 (Activation) (None, 5, 5, 384) 0 batch_normalization_208[0][0]
__________________________________________________________________________________________________
activation_79 (Activation) (None, 5, 5, 384) 0 batch_normalization_209[0][0]
__________________________________________________________________________________________________
activation_82 (Activation) (None, 5, 5, 384) 0 batch_normalization_212[0][0]
__________________________________________________________________________________________________
activation_83 (Activation) (None, 5, 5, 384) 0 batch_normalization_213[0][0]
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 5, 5, 192) 576 conv2d_214[0][0]
__________________________________________________________________________________________________
activation_76 (Activation) (None, 5, 5, 320) 0 batch_normalization_206[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 5, 5, 768) 0 activation_78[0][0]
activation_79[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 5, 5, 768) 0 activation_82[0][0]
activation_83[0][0]
__________________________________________________________________________________________________
activation_84 (Activation) (None, 5, 5, 192) 0 batch_normalization_214[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, 5, 5, 2048) 0 activation_76[0][0]
mixed9_0[0][0]
concatenate[0][0]
activation_84[0][0]
__________________________________________________________________________________________________
conv2d_219 (Conv2D) (None, 5, 5, 448) 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 5, 5, 448) 1344 conv2d_219[0][0]
__________________________________________________________________________________________________
activation_89 (Activation) (None, 5, 5, 448) 0 batch_normalization_219[0][0]
__________________________________________________________________________________________________
conv2d_216 (Conv2D) (None, 5, 5, 384) 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_220 (Conv2D) (None, 5, 5, 384) 1548288 activation_89[0][0]
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 5, 5, 384) 1152 conv2d_216[0][0]
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 5, 5, 384) 1152 conv2d_220[0][0]
__________________________________________________________________________________________________
activation_86 (Activation) (None, 5, 5, 384) 0 batch_normalization_216[0][0]
__________________________________________________________________________________________________
activation_90 (Activation) (None, 5, 5, 384) 0 batch_normalization_220[0][0]
__________________________________________________________________________________________________
conv2d_217 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_218 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_221 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
conv2d_222 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 5, 5, 2048) 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_215 (Conv2D) (None, 5, 5, 320) 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 5, 5, 384) 1152 conv2d_217[0][0]
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 5, 5, 384) 1152 conv2d_218[0][0]
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 5, 5, 384) 1152 conv2d_221[0][0]
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 5, 5, 384) 1152 conv2d_222[0][0]
__________________________________________________________________________________________________
conv2d_223 (Conv2D) (None, 5, 5, 192) 393216 average_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 5, 5, 320) 960 conv2d_215[0][0]
__________________________________________________________________________________________________
activation_87 (Activation) (None, 5, 5, 384) 0 batch_normalization_217[0][0]
__________________________________________________________________________________________________
activation_88 (Activation) (None, 5, 5, 384) 0 batch_normalization_218[0][0]
__________________________________________________________________________________________________
activation_91 (Activation) (None, 5, 5, 384) 0 batch_normalization_221[0][0]
__________________________________________________________________________________________________
activation_92 (Activation) (None, 5, 5, 384) 0 batch_normalization_222[0][0]
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 5, 5, 192) 576 conv2d_223[0][0]
__________________________________________________________________________________________________
activation_85 (Activation) (None, 5, 5, 320) 0 batch_normalization_215[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 5, 5, 768) 0 activation_87[0][0]
activation_88[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 5, 5, 768) 0 activation_91[0][0]
activation_92[0][0]
__________________________________________________________________________________________________
activation_93 (Activation) (None, 5, 5, 192) 0 batch_normalization_223[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, 5, 5, 2048) 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_26 (Gl (None, 2048) 0 mixed10[0][0]
__________________________________________________________________________________________________
dense_78 (Dense) (None, 512) 1049088 global_average_pooling2d_26[0][0]
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 512) 2048 dense_78[0][0]
__________________________________________________________________________________________________
dropout_52 (Dropout) (None, 512) 0 batch_normalization_224[0][0]
__________________________________________________________________________________________________
dense_79 (Dense) (None, 256) 131328 dropout_52[0][0]
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 256) 1024 dense_79[0][0]
__________________________________________________________________________________________________
predictions (Dense) (None, 120) 30840 batch_normalization_225[0][0]
==================================================================================================
Total params: 23,017,112
Trainable params: 22,981,144
Non-trainable params: 35,968
__________________________________________________________________________________________________
# Sanity-check the assembled model: 224x224 RGB input, 120 breed scores out.
print(f'Input shape: {model.layers[0].input_shape}')
print(f'Output shape: {model.layers[-1].output_shape}')
Input shape: [(None, 224, 224, 3)] Output shape: (None, 120)
from tensorflow.keras.preprocessing.image import load_img
from sklearn.preprocessing import OneHotEncoder
from sklearn.utils import shuffle
from tqdm import tqdm
def load_train_images():
    """Load every labeled training image into memory as 224x224 RGB arrays.

    Reads the rows of the notebook-level `labeled_data` frame, resizes each
    image on load, and returns (images, breed labels, filenames) shuffled
    consistently with a fixed random seed for reproducibility.
    """
    images, breeds, ids = [], [], []
    for _, row in tqdm(labeled_data.iterrows()):
        img_path = join(data_dir, 'train', row['id'])
        # load_img resizes during decode, so no separate cv2 resize is needed.
        pixels = np.array(load_img(img_path, color_mode='rgb', target_size=(224, 224)))
        images.append(pixels)
        breeds.append(row['breed'])
        ids.append(row['id'])
    # shuffle() permutes all three arrays with the same ordering.
    return shuffle(np.array(images), np.array(breeds), np.array(ids), random_state=0)
X_train, y_train, filenames = load_train_images()
10222it [00:25, 393.25it/s]
from sklearn.model_selection import train_test_split
def create_startisfied_train_test_split(X, y, filenames):
    """Stratified 80/20 train/validation split that keeps filenames paired with images.

    Returns the usual train_test_split 4-tuple
    (X_train_pairs, X_valid_pairs, y_train, y_valid) where each X half is an
    object array of (image, filename) pairs, later separated with zip(*pairs).
    """
    # dtype=object is required: each pair mixes an image ndarray with a
    # filename string, so without it NumPy emits the ragged-nested-sequence
    # VisibleDeprecationWarning (and raises outright on newer versions).
    paired = np.array(list(zip(X, filenames)), dtype=object)
    # stratify=y preserves the per-breed class proportions in both splits.
    return train_test_split(paired, y, test_size=0.2, random_state=0, stratify=y)
# Perform the stratified split; images stay paired with their filenames.
X_train_samples, X_valid_samples, y_train, y_valid = create_startisfied_train_test_split(X_train, y_train, filenames)
# Unzip the (image, filename) pairs back into separate tuples.
X_valid, valid_filenames = zip(*X_valid_samples)
X_train, train_filenames = zip(*X_train_samples)
# Re-stack the image tuples into contiguous ndarrays for Keras.
X_valid = np.array([x for x in X_valid])
X_train = np.array([x for x in X_train])
C:\Users\Itay Bouganim\AppData\Roaming\Python\Python37\site-packages\ipykernel_launcher.py:4: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray after removing the cwd from sys.path.
8177 8177
# Print per-breed counts to confirm the stratified split kept the class
# balance, then one-hot encode the string labels for categorical
# cross-entropy training (one_hot_encode is defined earlier in the notebook).
print(f'Validation labels stratisfied counts:')
unique, counts = np.unique(y_valid, return_counts=True)
print(list(zip(unique, counts))[:10])
y_valid_encoded = one_hot_encode(y_valid)
print(f'\n\nTrain labels stratisfied counts:')
unique, counts = np.unique(y_train, return_counts=True)
print(list(zip(unique, counts))[:10])
y_train_encoded = one_hot_encode(y_train)
Validation labels stratisfied counts:
[('affenpinscher', 16), ('afghan_hound', 23), ('african_hunting_dog', 17), ('airedale', 22), ('american_staffordshire_terrier', 15), ('appenzeller', 16), ('australian_terrier', 21), ('basenji', 22), ('basset', 16), ('beagle', 21)]
Train labels stratisfied counts:
[('affenpinscher', 64), ('afghan_hound', 93), ('african_hunting_dog', 69), ('airedale', 85), ('american_staffordshire_terrier', 59), ('appenzeller', 62), ('australian_terrier', 81), ('basenji', 88), ('basset', 66), ('beagle', 84)]
# Replace the string labels with their one-hot encoded matrices and
# spot-check three random rows from each to verify the encoding shape.
y_train = y_train_encoded
y_valid = y_valid_encoded
print('Training one-hot encoded outputs samples:')
print(y_train[np.random.choice(y_train.shape[0], 3, replace=False)])
print('\n\nValidation one-hot encoded outputs samples:')
print(y_valid[np.random.choice(y_valid.shape[0], 3, replace=False)])
Training one-hot encoded outputs samples: [[1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] Validation one-hot encoded outputs samples: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0.]]
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Training-only augmentation: random shears, rotations, shifts, zooms and
# horizontal flips on top of 0-1 pixel rescaling. Validation images are
# only rescaled so the reported metrics reflect unmodified photos.
aug_image_gen = ImageDataGenerator(rescale=1./255,
shear_range=0.2,
rotation_range=40,
width_shift_range=0.2,
height_shift_range=0.2,
fill_mode='nearest',
zoom_range=0.2,
horizontal_flip=True)
valid_image_gen = ImageDataGenerator(rescale=1./255)
# In-memory generators that feed shuffled (X, y) batches to model.fit.
train_generator = aug_image_gen.flow(
x=X_train,
y=y_train,
batch_size=BATCH_SIZE,
shuffle=True,
seed=42,
)
valid_generator = valid_image_gen.flow(
x=X_valid,
y=y_valid,
batch_size=BATCH_SIZE,
shuffle=True,
seed=42,
)
# Multi-class setup: categorical cross-entropy over the 120 softmax outputs,
# Adam at the default 1e-3 learning rate.
model.compile(loss='categorical_crossentropy',optimizer=Adam(learning_rate=0.001), metrics=['accuracy'])
# get_callbacks (defined earlier) supplies the val_accuracy checkpointing and
# early stopping visible in the training log below.
inception_hist = model.fit(x=train_generator, validation_data=valid_generator, callbacks=get_callbacks(saved_model_name='inceptionv3'), epochs=20, verbose=2)
Epoch 1/20 Epoch 00001: val_accuracy improved from -inf to 0.74132, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 131s - loss: 2.1367 - accuracy: 0.4858 - val_loss: 0.8690 - val_accuracy: 0.7413 Epoch 2/20 Epoch 00002: val_accuracy improved from 0.74132 to 0.74377, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 127s - loss: 1.4722 - accuracy: 0.5940 - val_loss: 0.8556 - val_accuracy: 0.7438 Epoch 3/20 Epoch 00003: val_accuracy improved from 0.74377 to 0.75452, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 127s - loss: 1.3904 - accuracy: 0.6213 - val_loss: 0.8131 - val_accuracy: 0.7545 Epoch 4/20 Epoch 00004: val_accuracy improved from 0.75452 to 0.76675, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 132s - loss: 1.3107 - accuracy: 0.6275 - val_loss: 0.7592 - val_accuracy: 0.7667 Epoch 5/20 Epoch 00005: val_accuracy improved from 0.76675 to 0.77408, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 139s - loss: 1.2774 - accuracy: 0.6385 - val_loss: 0.7648 - val_accuracy: 0.7741 Epoch 6/20 Epoch 00006: val_accuracy improved from 0.77408 to 0.77995, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 138s - loss: 1.2627 - accuracy: 0.6392 - val_loss: 0.7282 - val_accuracy: 0.7800 Epoch 7/20 Epoch 00007: val_accuracy did not improve from 0.77995 409/409 - 137s - loss: 1.2114 - accuracy: 0.6572 - val_loss: 0.7470 - val_accuracy: 0.7751 Epoch 8/20 
Epoch 00008: val_accuracy improved from 0.77995 to 0.78240, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 139s - loss: 1.1791 - accuracy: 0.6642 - val_loss: 0.7461 - val_accuracy: 0.7824 Epoch 9/20 Epoch 00009: val_accuracy improved from 0.78240 to 0.78386, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 138s - loss: 1.1664 - accuracy: 0.6634 - val_loss: 0.7387 - val_accuracy: 0.7839 Epoch 10/20 Epoch 00010: val_accuracy did not improve from 0.78386 409/409 - 138s - loss: 1.1413 - accuracy: 0.6771 - val_loss: 0.7248 - val_accuracy: 0.7834 Epoch 11/20 Epoch 00011: val_accuracy improved from 0.78386 to 0.79022, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 140s - loss: 1.1338 - accuracy: 0.6773 - val_loss: 0.6945 - val_accuracy: 0.7902 Epoch 12/20 Epoch 00012: val_accuracy improved from 0.79022 to 0.79071, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 141s - loss: 1.1008 - accuracy: 0.6813 - val_loss: 0.7025 - val_accuracy: 0.7907 Epoch 13/20 Epoch 00013: val_accuracy did not improve from 0.79071 409/409 - 139s - loss: 1.1341 - accuracy: 0.6710 - val_loss: 0.7088 - val_accuracy: 0.7868 Epoch 14/20 Epoch 00014: val_accuracy improved from 0.79071 to 0.79120, saving model to C:\My Files\School\Programming\Projects\Deep Learning\Assignments\Assignment1/data/dog-breed/saved_models\model_inceptionv3.h5 409/409 - 140s - loss: 1.0859 - accuracy: 0.6874 - val_loss: 0.7187 - val_accuracy: 0.7912 Epoch 15/20 Epoch 00015: val_accuracy did not improve from 0.79120 409/409 - 140s - loss: 1.0976 - accuracy: 0.6818 - val_loss: 0.6951 - val_accuracy: 0.7858 Epoch 
16/20 Epoch 00016: val_accuracy did not improve from 0.79120 409/409 - 140s - loss: 1.0807 - accuracy: 0.6868 - val_loss: 0.7329 - val_accuracy: 0.7819 Epoch 17/20 Epoch 00017: val_accuracy did not improve from 0.79120 409/409 - 140s - loss: 1.0716 - accuracy: 0.6874 - val_loss: 0.7228 - val_accuracy: 0.7824 Epoch 18/20 Epoch 00018: val_accuracy did not improve from 0.79120 409/409 - 142s - loss: 1.0681 - accuracy: 0.6911 - val_loss: 0.7072 - val_accuracy: 0.7824 Epoch 19/20 Epoch 00019: val_accuracy did not improve from 0.79120 409/409 - 139s - loss: 1.0679 - accuracy: 0.6848 - val_loss: 0.8488 - val_accuracy: 0.7785 Epoch 00019: early stopping
plot_model_stats(inception_hist)
def print_model_stats(h):
    """Print the final-epoch train/validation accuracy and loss of a fit history."""
    # The [-1:] slice keeps a one-element list, so mean() is just that value;
    # kept for parity with the original multi-epoch averaging pattern.
    final_acc = statistics.mean(h.history['accuracy'][-1:])
    final_loss = statistics.mean(h.history['loss'][-1:])
    final_val_acc = statistics.mean(h.history['val_accuracy'][-1:])
    final_val_loss = statistics.mean(h.history['val_loss'][-1:])
    print(f"Last epoch accuracy: {round(final_acc * 100, 2)}%")
    print(f"Last epoch loss: {final_loss}")
    print(f"Last epoch validation accuracy: {round(final_val_acc * 100, 2)}%")
    print(f"Last epoch validation loss: {final_val_loss}")
print_model_stats(inception_hist)
Last epoch accuracy: 68.48% Last epoch loss: 1.0679259300231934 Last epoch validation accuracy: 77.85% Last epoch validation loss: 0.8487804532051086
def calculate_confusion_matrix_train_test(saved_model_prefix, dims, model):
    """Score saved weights on the validation set and collect diagnostics.

    Loads the checkpoint named by `saved_model_prefix`, predicts on the
    notebook-level X_valid (rescaled to 0-1), and returns a dict with the
    confusion matrix, the 10 most confident correct and wrong predictions,
    and the overall fraction of correct predictions.
    """
    model.load_weights(join(model_save_dir, f"model_{saved_model_prefix}.h5"))
    probs = model.predict(X_valid / 255.)
    pred_classes = np.argmax(probs, axis=1)
    true_classes = np.argmax(y_valid, axis=1)
    # One record per validation sample: predicted/true breed names, the
    # source filename and the model's top softmax confidence.
    records = [
        {'pred_label': index_to_label[cls], 'true_label': index_to_label[true_classes[i]],
         'filename': valid_filenames[i], 'perc': np.amax(probs[i])}
        for i, cls in enumerate(pred_classes)
    ]
    hits = [r for r in records if r['pred_label'] == r['true_label']]
    misses = [r for r in records if r['pred_label'] != r['true_label']]
    by_confidence = lambda rec: rec['perc']
    return {
        'confusion': confusion_matrix(true_classes, pred_classes),
        'correct': sorted(hits, key=by_confidence, reverse=True)[:10],
        'wrong': sorted(misses, key=by_confidence, reverse=True)[:10],
        'correct_perc': len(hits) / len(valid_filenames),
    }
# Evaluate the best saved InceptionV3 checkpoint on the validation set, then
# visualize the confusion matrix and the most confident hits/misses
# (plot_confusion_matrix / read_and_plot_correct_wrong defined earlier).
inceptionv3_cnn_metrics = calculate_confusion_matrix_train_test(saved_model_prefix='inceptionv3', dims=224, model=model)
plot_confusion_matrix(inceptionv3_cnn_metrics['confusion'], labels=labels, title="Confusion Matrix InceptionV3 based CNN")
read_and_plot_correct_wrong(correct=inceptionv3_cnn_metrics['correct'], wrong=inceptionv3_cnn_metrics['wrong'])
We can see from the prediction samples that we got a big improvement by using InceptionV3.
Most of the wrong predictions we got are hard to separate just by looking at the pictures.
The wrongly predicted dog breeds look very similar in terms of shape, hair, color and size.
We can also see that the confidence scores of both the top correct and the top wrong predictions are very high, which can indicate that our model is very decisive this time around.
print('Correct percentage: {}%'.format(round(inceptionv3_cnn_metrics['correct_perc'] * 100, 2)))
Correct percentage: 79.12%
def create_submission_file_inceptionv3(X_test, test_ids, name, dims, saved_model_prefix, model):
    """Predict test-set breed probabilities and write a Kaggle submission CSV.

    Loads the checkpoint named by `saved_model_prefix`, predicts on X_test,
    writes one probability column per breed (sorted alphabetically) indexed
    by image id to `name` under data_dir, and returns the file re-read as a
    DataFrame for inspection.
    """
    model.load_weights(join(model_save_dir, f"{saved_model_prefix}.h5"))
    probs = model.predict(X_test, batch_size=BATCH_SIZE, verbose=2)
    # Columns come from the label_to_index mapping keys; sort them so the
    # column order matches the competition's expected alphabetical layout.
    frame = pd.DataFrame(data=probs, index=test_ids, columns=np.array([*label_to_index]))
    frame = frame.sort_index(axis=1)
    frame.index.name = 'id'
    out_path = join(data_dir, name)
    frame.to_csv(out_path, encoding='utf-8', index=True)
    return pd.read_csv(out_path)
# Fixed: pixel rescaling must divide by 255 (the max 8-bit intensity), not
# 225 — the original mismatch skewed test inputs relative to the 1./255
# rescale used by the training/validation generators.
X_test, test_ids = get_test_samples(preprocess_func=lambda s: s / 255., dims=224, dtype=np.float64)
Loading test data
0%| | 4/10357 [00:00<05:22, 32.09it/s]
(10357, 224, 224, 3)
100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 10357/10357 [01:54<00:00, 90.84it/s]
submission_data = create_submission_file_inceptionv3(X_test=X_test, test_ids=test_ids, name='inceptionv3_submission.csv', dims=224,
saved_model_prefix='model_inceptionv3', model=model)
submission_data
518/518 - 92s
| id | affenpinscher | afghan_hound | african_hunting_dog | airedale | american_staffordshire_terrier | appenzeller | australian_terrier | basenji | basset | ... | toy_poodle | toy_terrier | vizsla | walker_hound | weimaraner | welsh_springer_spaniel | west_highland_white_terrier | whippet | wire-haired_fox_terrier | yorkshire_terrier | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 000621fb3cbb32d8935728e48679680e | 0.000122 | 1.997432e-04 | 1.512205e-06 | 1.535732e-07 | 1.690818e-08 | 1.354579e-07 | 2.576929e-06 | 8.333871e-08 | 8.343577e-07 | ... | 1.402244e-06 | 3.958630e-07 | 1.688202e-06 | 2.035644e-07 | 7.043890e-07 | 7.745700e-07 | 4.252997e-07 | 4.304712e-08 | 5.056384e-06 | 1.437696e-05 |
| 1 | 00102ee9d8eb90812350685311fe5890 | 0.000001 | 1.953093e-09 | 7.254951e-08 | 2.198116e-07 | 5.376096e-06 | 8.215196e-07 | 8.389158e-08 | 3.947013e-07 | 4.224020e-07 | ... | 6.629390e-07 | 2.064723e-06 | 2.083986e-07 | 1.468025e-05 | 1.892292e-05 | 3.720663e-07 | 2.547189e-04 | 2.666109e-07 | 1.154604e-06 | 3.045920e-07 |
| 2 | 0012a730dfa437f5f3613fb75efcd4ce | 0.000004 | 1.249285e-01 | 4.775828e-04 | 4.788253e-05 | 3.150315e-05 | 6.404155e-06 | 1.602149e-04 | 4.267707e-06 | 7.186257e-05 | ... | 2.025650e-06 | 5.285336e-06 | 3.752922e-04 | 2.144447e-04 | 1.443993e-03 | 1.460943e-04 | 2.915576e-06 | 2.335792e-02 | 3.213605e-05 | 5.789116e-04 |
| 3 | 001510bc8570bbeee98c8d80c8a95ec1 | 0.003182 | 2.659749e-03 | 8.035110e-04 | 2.513948e-03 | 1.551429e-04 | 1.451453e-01 | 1.311704e-04 | 2.154395e-01 | 9.541432e-03 | ... | 1.987892e-03 | 4.620255e-03 | 1.001725e-03 | 5.018954e-03 | 3.078051e-04 | 3.698469e-05 | 6.307058e-03 | 1.706925e-02 | 7.465795e-04 | 1.395639e-03 |
| 4 | 001a5f3114548acdefa3d4da05474c2e | 0.038793 | 5.680982e-05 | 7.030024e-05 | 5.066290e-07 | 8.997701e-06 | 4.543847e-06 | 4.656462e-04 | 9.807300e-06 | 2.087053e-05 | ... | 3.378397e-05 | 2.376743e-05 | 1.499276e-05 | 3.978268e-05 | 1.857818e-05 | 2.180684e-05 | 4.077354e-04 | 2.065280e-06 | 2.855959e-05 | 8.409017e-04 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 10352 | ffeda8623d4eee33c6d1156a2ecbfcf8 | 0.000031 | 2.386003e-03 | 5.829728e-06 | 2.402039e-05 | 2.045461e-04 | 5.810452e-06 | 2.231237e-05 | 2.824520e-06 | 3.555507e-05 | ... | 1.452536e-02 | 3.335844e-06 | 2.136583e-05 | 4.973574e-06 | 1.901992e-04 | 2.048868e-06 | 7.087192e-05 | 7.844208e-05 | 9.225511e-05 | 7.228808e-05 |
| 10353 | fff1ec9e6e413275984966f745a313b0 | 0.000006 | 5.177933e-05 | 2.534736e-05 | 9.219802e-06 | 2.342470e-03 | 5.331960e-05 | 1.312880e-06 | 6.529973e-06 | 3.310814e-03 | ... | 3.504906e-06 | 2.401402e-06 | 2.800407e-05 | 2.561131e-04 | 5.582420e-01 | 1.237938e-05 | 7.196153e-05 | 6.173470e-03 | 2.687479e-06 | 1.332929e-05 |
| 10354 | fff74b59b758bbbf13a5793182a9bbe4 | 0.002696 | 1.533445e-04 | 2.977253e-02 | 2.218070e-06 | 9.650048e-06 | 3.064542e-05 | 3.514635e-03 | 1.043879e-04 | 1.264548e-04 | ... | 3.265220e-05 | 1.563324e-03 | 1.670576e-04 | 3.491375e-04 | 4.664837e-05 | 1.655976e-04 | 3.729510e-05 | 4.796241e-05 | 8.349876e-06 | 3.941266e-03 |
| 10355 | fff7d50d848e8014ac1e9172dc6762a3 | 0.000405 | 2.674073e-06 | 1.323689e-06 | 1.253531e-07 | 1.099248e-06 | 3.779833e-06 | 2.618770e-05 | 4.746941e-06 | 2.005733e-05 | ... | 2.473992e-05 | 6.755071e-07 | 5.214762e-06 | 1.360822e-06 | 2.376353e-07 | 8.088533e-07 | 7.062133e-05 | 6.302553e-07 | 9.513592e-07 | 2.396691e-05 |
| 10356 | fffbff22c1f51e3dc80c4bf04089545b | 0.000002 | 2.298069e-04 | 4.185158e-04 | 5.420711e-07 | 7.502253e-06 | 1.569099e-06 | 8.599427e-05 | 1.706652e-07 | 2.876660e-06 | ... | 1.283893e-07 | 5.182729e-07 | 2.487918e-05 | 6.632046e-06 | 1.733085e-04 | 3.699239e-06 | 3.002746e-06 | 9.439946e-04 | 2.325990e-06 | 1.942212e-05 |
10357 rows × 121 columns
Support-vector machines (SVMs, also support-vector networks) are supervised learning models with associated learning algorithms that analyze data for classification and regression analysis.
More formally, a support-vector machine constructs a hyperplane or set of hyperplanes in a high- or infinite-dimensional space, which can be used for classification, regression, or other tasks like outliers detection.
A good separation is achieved by the hyperplane that has the largest distance to the nearest training-data point of any class (so-called functional margin), since in general the larger the margin, the lower the generalization error of the classifier.
display_image("svm.png")
display_image("svm2.png")
# Reload the best checkpointed InceptionV3 weights into the trained model so
# the steps below use the highest-validation-accuracy epoch.
# (Dropped the extraneous f-string prefix: the literal has no placeholders.)
inceptionv3_model = model
inceptionv3_model.load_weights(join(model_save_dir, "model_inceptionv3.h5"))
from keras import backend as K
for layer_idx in range(len(inceptionv3_model.layers)):
print(layer_idx, inceptionv3_model.layers[layer_idx])
0 <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x00000229633DEE88> 1 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229633DE508> 2 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022958CCC308> 3 <tensorflow.python.keras.layers.core.Activation object at 0x0000022958CCC3C8> 4 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F1B2648> 5 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002296371C748> 6 <tensorflow.python.keras.layers.core.Activation object at 0x0000022966EDE948> 7 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022966EDE288> 8 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DECD3C8> 9 <tensorflow.python.keras.layers.core.Activation object at 0x0000022966DE6108> 10 <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x0000022966DE6C48> 11 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022967479F48> 12 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B2DD8C8> 13 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B2DD3C8> 14 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E1E3148> 15 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002296347F6C8> 16 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963468308> 17 <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x0000022963468088> 18 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960C41948> 19 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022967030FC8> 20 <tensorflow.python.keras.layers.core.Activation object at 0x000002296705BBC8> 21 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C4BE908> 22 <tensorflow.python.keras.layers.convolutional.Conv2D 
object at 0x000002296705B8C8> 23 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295C4ABF88> 24 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002296703AF48> 25 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C4A8848> 26 <tensorflow.python.keras.layers.core.Activation object at 0x0000022967036848> 27 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x0000022961003948> 28 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002296346B448> 29 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C4A8908> 30 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022967036908> 31 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F0BEE48> 32 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002296346BB48> 33 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960C55F88> 34 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960FFFF88> 35 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F0BA548> 36 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C4BE848> 37 <tensorflow.python.keras.layers.core.Activation object at 0x0000022960C41888> 38 <tensorflow.python.keras.layers.core.Activation object at 0x0000022961003888> 39 <tensorflow.python.keras.layers.core.Activation object at 0x000002295FE8D048> 40 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002295F0B6188> 41 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D332E48> 42 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D334888> 43 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C0835C8> 44 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295CEA6E08> 45 
<tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C086388> 46 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295CEDE848> 47 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295C071888> 48 <tensorflow.python.keras.layers.core.Activation object at 0x000002295CEC0588> 49 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C08E5C8> 50 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x0000022961225888> 51 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F0DF4C8> 52 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D330F88> 53 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C074E48> 54 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229611F0788> 55 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F0CB888> 56 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D353348> 57 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295C053888> 58 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229611F0A88> 59 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F0CBA08> 60 <tensorflow.python.keras.layers.core.Activation object at 0x000002295D347888> 61 <tensorflow.python.keras.layers.core.Activation object at 0x0000022961202608> 62 <tensorflow.python.keras.layers.core.Activation object at 0x000002296121AD08> 63 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002296121AC08> 64 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C29EAC8> 65 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295C2C61C8> 66 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C2C6DC8> 67 
<tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E1F6B48> 68 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295C2C6B08> 69 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295E1DE188> 70 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B308B48> 71 <tensorflow.python.keras.layers.core.Activation object at 0x000002295E1DED88> 72 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B3086C8> 73 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x000002295B2E8B88> 74 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E209DC8> 75 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E1DEAC8> 76 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B308448> 77 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229674C00C8> 78 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295E1F61C8> 79 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295C29E188> 80 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B2E8208> 81 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229674C07C8> 82 <tensorflow.python.keras.layers.core.Activation object at 0x000002295E1F6E08> 83 <tensorflow.python.keras.layers.core.Activation object at 0x000002295C29ED88> 84 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B2E8548> 85 <tensorflow.python.keras.layers.core.Activation object at 0x00000229674E64C8> 86 <tensorflow.python.keras.layers.merge.Concatenate object at 0x00000229674E1188> 87 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D667048> 88 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D669A08> 89 
<tensorflow.python.keras.layers.core.Activation object at 0x000002295D6961C8> 90 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D68A248> 91 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D693A08> 92 <tensorflow.python.keras.layers.core.Activation object at 0x000002295D665788> 93 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229674CA788> 94 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D671048> 95 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229674B2B48> 96 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963F86A48> 97 <tensorflow.python.keras.layers.core.Activation object at 0x00000229674C4448> 98 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963F855C8> 99 <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x0000022963FAFCC8> 100 <tensorflow.python.keras.layers.merge.Concatenate object at 0x0000022963FADA88> 101 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022967471A88> 102 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229667ACD48> 103 <tensorflow.python.keras.layers.core.Activation object at 0x0000022966780A48> 104 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229667A6D08> 105 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229667AFB48> 106 <tensorflow.python.keras.layers.core.Activation object at 0x0000022966799C48> 107 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963FA0BC8> 108 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022966799988> 109 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963F890C8> 110 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022966776B88> 111 
<tensorflow.python.keras.layers.core.Activation object at 0x0000022963F89CC8> 112 <tensorflow.python.keras.layers.core.Activation object at 0x000002295CD4AA88> 113 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963F89A08> 114 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295CD4A9C8> 115 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002296748B0C8> 116 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295CD34088> 117 <tensorflow.python.keras.layers.core.Activation object at 0x000002296748BD08> 118 <tensorflow.python.keras.layers.core.Activation object at 0x000002295CD34CC8> 119 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x000002295CD35A48> 120 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963FAD508> 121 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002296748BA08> 122 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295CD34A08> 123 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229675CFE88> 124 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963FA0208> 125 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022967471108> 126 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295CD350C8> 127 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229675A3588> 128 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963FA0CC8> 129 <tensorflow.python.keras.layers.core.Activation object at 0x0000022967471D48> 130 <tensorflow.python.keras.layers.core.Activation object at 0x000002295CD35D08> 131 <tensorflow.python.keras.layers.core.Activation object at 0x00000229675BE288> 132 <tensorflow.python.keras.layers.merge.Concatenate object at 0x00000229675CBE48> 133 
<tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022959D51B08> 134 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022959D432C8> 135 <tensorflow.python.keras.layers.core.Activation object at 0x0000022959D43D08> 136 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022959D43C88> 137 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022959D65188> 138 <tensorflow.python.keras.layers.core.Activation object at 0x0000022959D65DC8> 139 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229675AB148> 140 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022959D65B08> 141 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DEE9648> 142 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002293F269088> 143 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DEE9748> 144 <tensorflow.python.keras.layers.core.Activation object at 0x000002293F269988> 145 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295DF020C8> 146 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002293F269BC8> 147 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DEF2508> 148 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002293F23BF88> 149 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DEF2E88> 150 <tensorflow.python.keras.layers.core.Activation object at 0x000002293F25D888> 151 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x0000022960A7A288> 152 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229675D4508> 153 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295DF05FC8> 154 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002293F25D948> 155 
<tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960A8DC48> 156 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229675AC788> 157 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DF133C8> 158 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960A943C8> 159 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960A89348> 160 <tensorflow.python.keras.layers.core.Activation object at 0x00000229675AB208> 161 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DF13D48> 162 <tensorflow.python.keras.layers.core.Activation object at 0x0000022960A7A408> 163 <tensorflow.python.keras.layers.core.Activation object at 0x0000022960A961C8> 164 <tensorflow.python.keras.layers.merge.Concatenate object at 0x0000022960A739C8> 165 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F86DB88> 166 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F865108> 167 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F865D08> 168 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F865A48> 169 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022959E17FC8> 170 <tensorflow.python.keras.layers.core.Activation object at 0x0000022959E0A8C8> 171 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960A91208> 172 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022959E0A988> 173 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229636A9448> 174 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022959E25EC8> 175 <tensorflow.python.keras.layers.core.Activation object at 0x00000229636A9DC8> 176 <tensorflow.python.keras.layers.core.Activation object at 0x0000022959E1B7C8> 177 
<tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963697F88> 178 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022959E1B888> 179 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229636BC308> 180 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F1C6DC8> 181 <tensorflow.python.keras.layers.core.Activation object at 0x00000229636BCD88> 182 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F1CF6C8> 183 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x0000022960E6D188> 184 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960A82308> 185 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229636BCD08> 186 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F1DD408> 187 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960E5EA88> 188 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960A7E588> 189 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F86D208> 190 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F1D5CC8> 191 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022960E44148> 192 <tensorflow.python.keras.layers.core.Activation object at 0x0000022960A7EFC8> 193 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F86D548> 194 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F1E25C8> 195 <tensorflow.python.keras.layers.core.Activation object at 0x0000022960E44F48> 196 <tensorflow.python.keras.layers.merge.Concatenate object at 0x0000022960E36A48> 197 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295DC43A08> 198 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295E3A1F48> 199 
<tensorflow.python.keras.layers.core.Activation object at 0x000002295E3AE808> 200 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E3AE8C8> 201 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295E3C6E08> 202 <tensorflow.python.keras.layers.core.Activation object at 0x000002295E3B3708> 203 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002294961EF08> 204 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295E3D6608> 205 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229495FB288> 206 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B58CE88> 207 <tensorflow.python.keras.layers.core.Activation object at 0x00000229495FBD08> 208 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B584348> 209 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229495FBC08> 210 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B58A248> 211 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DC5B148> 212 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B55DC08> 213 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DC5BD88> 214 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B553508> 215 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x0000022963F36048> 216 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022960E3A2C8> 217 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295DC5BAC8> 218 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963F5F288> 219 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963F388C8> 220 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229496033C8> 221 
<tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DC78B88> 222 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963F57B08> 223 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963F581C8> 224 <tensorflow.python.keras.layers.core.Activation object at 0x0000022949603D48> 225 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DC43948> 226 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963F51408> 227 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963F58E08> 228 <tensorflow.python.keras.layers.merge.Concatenate object at 0x0000022963F58D88> 229 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022949761BC8> 230 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229496C8F88> 231 <tensorflow.python.keras.layers.core.Activation object at 0x00000229496C5848> 232 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229496C5908> 233 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229496C6E48> 234 <tensorflow.python.keras.layers.core.Activation object at 0x00000229496E0748> 235 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022949764FC8> 236 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229496E0808> 237 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229497511C8> 238 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963299D48> 239 <tensorflow.python.keras.layers.core.Activation object at 0x0000022949751E08> 240 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963291648> 241 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022949751B48> 242 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022963298388> 243 
<tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022949761088> 244 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022963261C48> 245 <tensorflow.python.keras.layers.core.Activation object at 0x0000022949761988> 246 <tensorflow.python.keras.layers.core.Activation object at 0x0000022963261DC8> 247 <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x00000229621A5288> 248 <tensorflow.python.keras.layers.merge.Concatenate object at 0x00000229621B3A08> 249 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B4D2AC8> 250 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B4DAB88> 251 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B507448> 252 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229621B5CC8> 253 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B4F6048> 254 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x0000022962193188> 255 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B4FDA08> 256 <tensorflow.python.keras.layers.core.Activation object at 0x0000022962193488> 257 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B50A308> 258 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x0000022962193A88> 259 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295DBCFA08> 260 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B5E9FC8> 261 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295B5DADC8> 262 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x00000229497B1388> 263 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229621B3D08> 264 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DBC5B88> 265 
<tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295DBC1F48> 266 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B600908> 267 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295B5DC808> 268 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229497B1988> 269 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229621B52C8> 270 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DBCF948> 271 <tensorflow.python.keras.layers.core.Activation object at 0x000002295DBF3808> 272 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B5F1208> 273 <tensorflow.python.keras.layers.core.Activation object at 0x000002295B5EC248> 274 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229497D6948> 275 <tensorflow.python.keras.layers.core.Activation object at 0x00000229621B5D48> 276 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002295B50BBC8> 277 <tensorflow.python.keras.layers.merge.Concatenate object at 0x00000229497EA6C8> 278 <tensorflow.python.keras.layers.core.Activation object at 0x00000229497D0648> 279 <tensorflow.python.keras.layers.merge.Concatenate object at 0x00000229497E9FC8> 280 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F250388> 281 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F23C408> 282 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F23CD48> 283 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D3DAC88> 284 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F23CF08> 285 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D3A3A48> 286 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295CAFF288> 287 
<tensorflow.python.keras.layers.core.Activation object at 0x000002295D3BB308> 288 <tensorflow.python.keras.layers.core.Activation object at 0x000002295CAFFD08> 289 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D3A2FC8> 290 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295F246DC8> 291 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295CAF4A48> 292 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D36C248> 293 <tensorflow.python.keras.layers.pooling.AveragePooling2D object at 0x000002295D209A08> 294 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x00000229497C78C8> 295 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D3A5908> 296 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295F253808> 297 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D36F7C8> 298 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D375908> 299 <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000002295D1E9808> 300 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x00000229497D3B88> 301 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F24FCC8> 302 <tensorflow.python.keras.layers.core.Activation object at 0x000002295F253908> 303 <tensorflow.python.keras.layers.core.Activation object at 0x000002295D36FAC8> 304 <tensorflow.python.keras.layers.core.Activation object at 0x000002295D37CD08> 305 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002295D1EC248> 306 <tensorflow.python.keras.layers.core.Activation object at 0x00000229497E6488> 307 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002295F2649C8> 308 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002295D397248> 309 
<tensorflow.python.keras.layers.core.Activation object at 0x000002295D1ECE08> 310 <tensorflow.python.keras.layers.merge.Concatenate object at 0x000002295D210BC8> 311 <tensorflow.python.keras.layers.pooling.GlobalAveragePooling2D object at 0x000002295F03DEC8> 312 <tensorflow.python.keras.layers.core.Dense object at 0x000002295F03DAC8> 313 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002293CE03BC8> 314 <tensorflow.python.keras.layers.core.Dropout object at 0x000002295F0544C8> 315 <tensorflow.python.keras.layers.core.Dense object at 0x000002295F054788> 316 <tensorflow.python.keras.layers.normalization_v2.BatchNormalization object at 0x000002293CE06448> 317 <tensorflow.python.keras.layers.core.Dense object at 0x000002293CE27308>
print("Setting Custom Inception V3 layers untrainable")
# Freeze all layers so the loaded weights stay fixed during any subsequent
# training; then print the architecture to confirm the setup.
for frozen_layer in inceptionv3_model.layers:
    frozen_layer.trainable = False
inceptionv3_model.summary()
Setting Custom Inception V3 layers untrainable
Model: "functional_53"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_27 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 111, 111, 32) 864 input_27[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 111, 111, 32) 96 conv2d_130[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 111, 111, 32) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 109, 109, 32) 9216 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 109, 109, 32) 96 conv2d_131[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 109, 109, 32) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 109, 109, 64) 18432 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 109, 109, 64) 192 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 109, 109, 64) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
max_pooling2d_126 (MaxPooling2D (None, 54, 54, 64) 0 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_126[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 54, 54, 80) 240 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 54, 54, 80) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 52, 52, 192) 138240 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 52, 52, 192) 576 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 52, 52, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
max_pooling2d_127 (MaxPooling2D (None, 25, 25, 192) 0 activation_4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 25, 25, 64) 192 conv2d_138[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 25, 25, 64) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 25, 25, 96) 55296 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 25, 25, 48) 144 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 25, 25, 96) 288 conv2d_139[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 25, 25, 48) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 25, 25, 96) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 25, 25, 192) 0 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 25, 25, 64) 76800 activation_6[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 25, 25, 96) 82944 activation_9[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 25, 25, 32) 6144 average_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 25, 25, 64) 192 conv2d_135[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 25, 25, 64) 192 conv2d_137[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 25, 25, 96) 288 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 25, 25, 32) 96 conv2d_141[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 25, 25, 64) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 25, 25, 64) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 25, 25, 96) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 25, 25, 32) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 25, 25, 256) 0 activation_5[0][0]
activation_7[0][0]
activation_10[0][0]
activation_11[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 25, 25, 64) 192 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 25, 25, 64) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 25, 25, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 25, 25, 96) 55296 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 25, 25, 48) 144 conv2d_143[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 25, 25, 96) 288 conv2d_146[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 25, 25, 48) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 25, 25, 96) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 25, 25, 64) 76800 activation_13[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 25, 25, 96) 82944 activation_16[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 25, 25, 64) 16384 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 25, 25, 64) 192 conv2d_142[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 25, 25, 64) 192 conv2d_144[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 25, 25, 96) 288 conv2d_147[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 25, 25, 64) 192 conv2d_148[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 25, 25, 64) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 25, 25, 64) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 25, 25, 96) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 25, 25, 64) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 25, 25, 288) 0 activation_12[0][0]
activation_14[0][0]
activation_17[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 25, 25, 64) 192 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 25, 25, 64) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 25, 25, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 25, 25, 96) 55296 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 25, 25, 48) 144 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 25, 25, 96) 288 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 25, 25, 48) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 25, 25, 96) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 25, 25, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 25, 25, 64) 76800 activation_20[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 25, 25, 96) 82944 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 25, 25, 64) 18432 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 25, 25, 64) 192 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 25, 25, 64) 192 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 25, 25, 96) 288 conv2d_154[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 25, 25, 64) 192 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 25, 25, 64) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 25, 25, 64) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 25, 25, 96) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 25, 25, 64) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 25, 25, 288) 0 activation_19[0][0]
activation_21[0][0]
activation_24[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 25, 25, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 25, 25, 64) 192 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 25, 25, 64) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 25, 25, 96) 55296 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 25, 25, 96) 288 conv2d_158[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 25, 25, 96) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 12, 12, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 12, 12, 96) 82944 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 12, 12, 384) 1152 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 12, 12, 96) 288 conv2d_159[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 12, 12, 384) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 12, 12, 96) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
max_pooling2d_128 (MaxPooling2D (None, 12, 12, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 12, 12, 768) 0 activation_26[0][0]
activation_29[0][0]
max_pooling2d_128[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 12, 12, 128) 384 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 12, 12, 128) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 12, 12, 128) 114688 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 12, 12, 128) 384 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 12, 12, 128) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 12, 12, 128) 114688 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 12, 12, 128) 384 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 12, 12, 128) 384 conv2d_166[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 12, 12, 128) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 12, 12, 128) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 12, 12, 128) 114688 activation_31[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 12, 12, 128) 114688 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 12, 12, 128) 384 conv2d_162[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 12, 12, 128) 384 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 12, 12, 128) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 12, 12, 128) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 12, 12, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 12, 12, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 12, 12, 192) 172032 activation_32[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 12, 12, 192) 172032 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 12, 12, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 12, 12, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 12, 12, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 12, 12, 192) 576 conv2d_169[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 12, 12, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 12, 12, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 12, 12, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 12, 12, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 12, 12, 768) 0 activation_30[0][0]
activation_33[0][0]
activation_38[0][0]
activation_39[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 12, 12, 160) 480 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 12, 12, 160) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 12, 12, 160) 179200 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 12, 12, 160) 480 conv2d_175[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 12, 12, 160) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 12, 12, 160) 179200 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 12, 12, 160) 480 conv2d_171[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 12, 12, 160) 480 conv2d_176[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 12, 12, 160) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 12, 12, 160) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 12, 12, 160) 179200 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 12, 12, 160) 179200 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 12, 12, 160) 480 conv2d_172[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 12, 12, 160) 480 conv2d_177[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 12, 12, 160) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 12, 12, 160) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 12, 12, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 12, 12, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 12, 12, 192) 215040 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 12, 12, 192) 215040 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 12, 12, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 12, 12, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 12, 12, 192) 576 conv2d_178[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 12, 12, 192) 576 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 12, 12, 192) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 12, 12, 192) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 12, 12, 192) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 12, 12, 192) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 12, 12, 768) 0 activation_40[0][0]
activation_43[0][0]
activation_48[0][0]
activation_49[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 12, 12, 160) 480 conv2d_184[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, 12, 12, 160) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 12, 12, 160) 179200 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 12, 12, 160) 480 conv2d_185[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, 12, 12, 160) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 12, 12, 160) 179200 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 12, 12, 160) 480 conv2d_181[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 12, 12, 160) 480 conv2d_186[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 12, 12, 160) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, 12, 12, 160) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 12, 12, 160) 179200 activation_51[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 12, 12, 160) 179200 activation_56[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 12, 12, 160) 480 conv2d_182[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 12, 12, 160) 480 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 12, 12, 160) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, 12, 12, 160) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 12, 12, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 12, 12, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 12, 12, 192) 215040 activation_52[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 12, 12, 192) 215040 activation_57[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 12, 12, 192) 576 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 12, 12, 192) 576 conv2d_183[0][0]
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 12, 12, 192) 576 conv2d_188[0][0]
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 12, 12, 192) 576 conv2d_189[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 12, 12, 192) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, 12, 12, 192) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, 12, 12, 192) 0 batch_normalization_188[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, 12, 12, 192) 0 batch_normalization_189[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 12, 12, 768) 0 activation_50[0][0]
activation_53[0][0]
activation_58[0][0]
activation_59[0][0]
__________________________________________________________________________________________________
conv2d_194 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 12, 12, 192) 576 conv2d_194[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, 12, 12, 192) 0 batch_normalization_194[0][0]
__________________________________________________________________________________________________
conv2d_195 (Conv2D) (None, 12, 12, 192) 258048 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 12, 12, 192) 576 conv2d_195[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, 12, 12, 192) 0 batch_normalization_195[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_196 (Conv2D) (None, 12, 12, 192) 258048 activation_65[0][0]
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 12, 12, 192) 576 conv2d_191[0][0]
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 12, 12, 192) 576 conv2d_196[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, 12, 12, 192) 0 batch_normalization_191[0][0]
__________________________________________________________________________________________________
activation_66 (Activation) (None, 12, 12, 192) 0 batch_normalization_196[0][0]
__________________________________________________________________________________________________
conv2d_192 (Conv2D) (None, 12, 12, 192) 258048 activation_61[0][0]
__________________________________________________________________________________________________
conv2d_197 (Conv2D) (None, 12, 12, 192) 258048 activation_66[0][0]
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 12, 12, 192) 576 conv2d_192[0][0]
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 12, 12, 192) 576 conv2d_197[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, 12, 12, 192) 0 batch_normalization_192[0][0]
__________________________________________________________________________________________________
activation_67 (Activation) (None, 12, 12, 192) 0 batch_normalization_197[0][0]
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 12, 12, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_193 (Conv2D) (None, 12, 12, 192) 258048 activation_62[0][0]
__________________________________________________________________________________________________
conv2d_198 (Conv2D) (None, 12, 12, 192) 258048 activation_67[0][0]
__________________________________________________________________________________________________
conv2d_199 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 12, 12, 192) 576 conv2d_190[0][0]
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 12, 12, 192) 576 conv2d_193[0][0]
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 12, 12, 192) 576 conv2d_198[0][0]
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 12, 12, 192) 576 conv2d_199[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, 12, 12, 192) 0 batch_normalization_190[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, 12, 12, 192) 0 batch_normalization_193[0][0]
__________________________________________________________________________________________________
activation_68 (Activation) (None, 12, 12, 192) 0 batch_normalization_198[0][0]
__________________________________________________________________________________________________
activation_69 (Activation) (None, 12, 12, 192) 0 batch_normalization_199[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 12, 12, 768) 0 activation_60[0][0]
activation_63[0][0]
activation_68[0][0]
activation_69[0][0]
__________________________________________________________________________________________________
conv2d_202 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 12, 12, 192) 576 conv2d_202[0][0]
__________________________________________________________________________________________________
activation_72 (Activation) (None, 12, 12, 192) 0 batch_normalization_202[0][0]
__________________________________________________________________________________________________
conv2d_203 (Conv2D) (None, 12, 12, 192) 258048 activation_72[0][0]
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 12, 12, 192) 576 conv2d_203[0][0]
__________________________________________________________________________________________________
activation_73 (Activation) (None, 12, 12, 192) 0 batch_normalization_203[0][0]
__________________________________________________________________________________________________
conv2d_200 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_204 (Conv2D) (None, 12, 12, 192) 258048 activation_73[0][0]
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 12, 12, 192) 576 conv2d_200[0][0]
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 12, 12, 192) 576 conv2d_204[0][0]
__________________________________________________________________________________________________
activation_70 (Activation) (None, 12, 12, 192) 0 batch_normalization_200[0][0]
__________________________________________________________________________________________________
activation_74 (Activation) (None, 12, 12, 192) 0 batch_normalization_204[0][0]
__________________________________________________________________________________________________
conv2d_201 (Conv2D) (None, 5, 5, 320) 552960 activation_70[0][0]
__________________________________________________________________________________________________
conv2d_205 (Conv2D) (None, 5, 5, 192) 331776 activation_74[0][0]
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 5, 5, 320) 960 conv2d_201[0][0]
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 5, 5, 192) 576 conv2d_205[0][0]
__________________________________________________________________________________________________
activation_71 (Activation) (None, 5, 5, 320) 0 batch_normalization_201[0][0]
__________________________________________________________________________________________________
activation_75 (Activation) (None, 5, 5, 192) 0 batch_normalization_205[0][0]
__________________________________________________________________________________________________
max_pooling2d_129 (MaxPooling2D (None, 5, 5, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 5, 5, 1280) 0 activation_71[0][0]
activation_75[0][0]
max_pooling2d_129[0][0]
__________________________________________________________________________________________________
conv2d_210 (Conv2D) (None, 5, 5, 448) 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 5, 5, 448) 1344 conv2d_210[0][0]
__________________________________________________________________________________________________
activation_80 (Activation) (None, 5, 5, 448) 0 batch_normalization_210[0][0]
__________________________________________________________________________________________________
conv2d_207 (Conv2D) (None, 5, 5, 384) 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_211 (Conv2D) (None, 5, 5, 384) 1548288 activation_80[0][0]
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 5, 5, 384) 1152 conv2d_207[0][0]
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 5, 5, 384) 1152 conv2d_211[0][0]
__________________________________________________________________________________________________
activation_77 (Activation) (None, 5, 5, 384) 0 batch_normalization_207[0][0]
__________________________________________________________________________________________________
activation_81 (Activation) (None, 5, 5, 384) 0 batch_normalization_211[0][0]
__________________________________________________________________________________________________
conv2d_208 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_209 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_212 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
conv2d_213 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 5, 5, 1280) 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_206 (Conv2D) (None, 5, 5, 320) 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 5, 5, 384) 1152 conv2d_208[0][0]
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 5, 5, 384) 1152 conv2d_209[0][0]
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 5, 5, 384) 1152 conv2d_212[0][0]
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 5, 5, 384) 1152 conv2d_213[0][0]
__________________________________________________________________________________________________
conv2d_214 (Conv2D) (None, 5, 5, 192) 245760 average_pooling2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 5, 5, 320) 960 conv2d_206[0][0]
__________________________________________________________________________________________________
activation_78 (Activation) (None, 5, 5, 384) 0 batch_normalization_208[0][0]
__________________________________________________________________________________________________
activation_79 (Activation) (None, 5, 5, 384) 0 batch_normalization_209[0][0]
__________________________________________________________________________________________________
activation_82 (Activation) (None, 5, 5, 384) 0 batch_normalization_212[0][0]
__________________________________________________________________________________________________
activation_83 (Activation) (None, 5, 5, 384) 0 batch_normalization_213[0][0]
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 5, 5, 192) 576 conv2d_214[0][0]
__________________________________________________________________________________________________
activation_76 (Activation) (None, 5, 5, 320) 0 batch_normalization_206[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 5, 5, 768) 0 activation_78[0][0]
activation_79[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 5, 5, 768) 0 activation_82[0][0]
activation_83[0][0]
__________________________________________________________________________________________________
activation_84 (Activation) (None, 5, 5, 192) 0 batch_normalization_214[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, 5, 5, 2048) 0 activation_76[0][0]
mixed9_0[0][0]
concatenate[0][0]
activation_84[0][0]
__________________________________________________________________________________________________
conv2d_219 (Conv2D) (None, 5, 5, 448) 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 5, 5, 448) 1344 conv2d_219[0][0]
__________________________________________________________________________________________________
activation_89 (Activation) (None, 5, 5, 448) 0 batch_normalization_219[0][0]
__________________________________________________________________________________________________
conv2d_216 (Conv2D) (None, 5, 5, 384) 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_220 (Conv2D) (None, 5, 5, 384) 1548288 activation_89[0][0]
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 5, 5, 384) 1152 conv2d_216[0][0]
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 5, 5, 384) 1152 conv2d_220[0][0]
__________________________________________________________________________________________________
activation_86 (Activation) (None, 5, 5, 384) 0 batch_normalization_216[0][0]
__________________________________________________________________________________________________
activation_90 (Activation) (None, 5, 5, 384) 0 batch_normalization_220[0][0]
__________________________________________________________________________________________________
conv2d_217 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_218 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_221 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
conv2d_222 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 5, 5, 2048) 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_215 (Conv2D) (None, 5, 5, 320) 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 5, 5, 384) 1152 conv2d_217[0][0]
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 5, 5, 384) 1152 conv2d_218[0][0]
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 5, 5, 384) 1152 conv2d_221[0][0]
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 5, 5, 384) 1152 conv2d_222[0][0]
__________________________________________________________________________________________________
conv2d_223 (Conv2D) (None, 5, 5, 192) 393216 average_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 5, 5, 320) 960 conv2d_215[0][0]
__________________________________________________________________________________________________
activation_87 (Activation) (None, 5, 5, 384) 0 batch_normalization_217[0][0]
__________________________________________________________________________________________________
activation_88 (Activation) (None, 5, 5, 384) 0 batch_normalization_218[0][0]
__________________________________________________________________________________________________
activation_91 (Activation) (None, 5, 5, 384) 0 batch_normalization_221[0][0]
__________________________________________________________________________________________________
activation_92 (Activation) (None, 5, 5, 384) 0 batch_normalization_222[0][0]
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 5, 5, 192) 576 conv2d_223[0][0]
__________________________________________________________________________________________________
activation_85 (Activation) (None, 5, 5, 320) 0 batch_normalization_215[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 5, 5, 768) 0 activation_87[0][0]
activation_88[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 5, 5, 768) 0 activation_91[0][0]
activation_92[0][0]
__________________________________________________________________________________________________
activation_93 (Activation) (None, 5, 5, 192) 0 batch_normalization_223[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, 5, 5, 2048) 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_26 (Gl (None, 2048) 0 mixed10[0][0]
__________________________________________________________________________________________________
dense_78 (Dense) (None, 512) 1049088 global_average_pooling2d_26[0][0]
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 512) 2048 dense_78[0][0]
__________________________________________________________________________________________________
dropout_52 (Dropout) (None, 512) 0 batch_normalization_224[0][0]
__________________________________________________________________________________________________
dense_79 (Dense) (None, 256) 131328 dropout_52[0][0]
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 256) 1024 dense_79[0][0]
__________________________________________________________________________________________________
predictions (Dense) (None, 120) 30840 batch_normalization_225[0][0]
==================================================================================================
Total params: 23,017,112
Trainable params: 0
Non-trainable params: 23,017,112
__________________________________________________________________________________________________
# Build a feature extractor from the fine-tuned InceptionV3 classifier:
# keep everything up to the penultimate layer (dropping the 120-way
# "predictions" head) so its activations can be used as SVM input features.
penultimate_layer = inceptionv3_model.layers[-2]
svm_inceptionv3_model = Model(
    inputs=inceptionv3_model.inputs,
    outputs=penultimate_layer.output,
)
# Print the truncated architecture to confirm the head was removed.
svm_inceptionv3_model.summary()
Model: "functional_61"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_27 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 111, 111, 32) 864 input_27[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 111, 111, 32) 96 conv2d_130[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 111, 111, 32) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 109, 109, 32) 9216 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 109, 109, 32) 96 conv2d_131[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 109, 109, 32) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 109, 109, 64) 18432 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 109, 109, 64) 192 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 109, 109, 64) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
max_pooling2d_126 (MaxPooling2D (None, 54, 54, 64) 0 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_126[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 54, 54, 80) 240 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 54, 54, 80) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 52, 52, 192) 138240 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 52, 52, 192) 576 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 52, 52, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
max_pooling2d_127 (MaxPooling2D (None, 25, 25, 192) 0 activation_4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 25, 25, 64) 192 conv2d_138[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 25, 25, 64) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 25, 25, 96) 55296 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 25, 25, 48) 144 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 25, 25, 96) 288 conv2d_139[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 25, 25, 48) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 25, 25, 96) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 25, 25, 192) 0 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_127[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 25, 25, 64) 76800 activation_6[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 25, 25, 96) 82944 activation_9[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 25, 25, 32) 6144 average_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 25, 25, 64) 192 conv2d_135[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 25, 25, 64) 192 conv2d_137[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 25, 25, 96) 288 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 25, 25, 32) 96 conv2d_141[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 25, 25, 64) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 25, 25, 64) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 25, 25, 96) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 25, 25, 32) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 25, 25, 256) 0 activation_5[0][0]
activation_7[0][0]
activation_10[0][0]
activation_11[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 25, 25, 64) 192 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 25, 25, 64) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 25, 25, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 25, 25, 96) 55296 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 25, 25, 48) 144 conv2d_143[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 25, 25, 96) 288 conv2d_146[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 25, 25, 48) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 25, 25, 96) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 25, 25, 64) 76800 activation_13[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 25, 25, 96) 82944 activation_16[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 25, 25, 64) 16384 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 25, 25, 64) 192 conv2d_142[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 25, 25, 64) 192 conv2d_144[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 25, 25, 96) 288 conv2d_147[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 25, 25, 64) 192 conv2d_148[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 25, 25, 64) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 25, 25, 64) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 25, 25, 96) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 25, 25, 64) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 25, 25, 288) 0 activation_12[0][0]
activation_14[0][0]
activation_17[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 25, 25, 64) 192 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 25, 25, 64) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 25, 25, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 25, 25, 96) 55296 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 25, 25, 48) 144 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 25, 25, 96) 288 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 25, 25, 48) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 25, 25, 96) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 25, 25, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 25, 25, 64) 76800 activation_20[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 25, 25, 96) 82944 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 25, 25, 64) 18432 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 25, 25, 64) 192 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 25, 25, 64) 192 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 25, 25, 96) 288 conv2d_154[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 25, 25, 64) 192 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 25, 25, 64) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 25, 25, 64) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 25, 25, 96) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 25, 25, 64) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 25, 25, 288) 0 activation_19[0][0]
activation_21[0][0]
activation_24[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 25, 25, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 25, 25, 64) 192 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 25, 25, 64) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 25, 25, 96) 55296 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 25, 25, 96) 288 conv2d_158[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 25, 25, 96) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 12, 12, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 12, 12, 96) 82944 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 12, 12, 384) 1152 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 12, 12, 96) 288 conv2d_159[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 12, 12, 384) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 12, 12, 96) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
max_pooling2d_128 (MaxPooling2D (None, 12, 12, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 12, 12, 768) 0 activation_26[0][0]
activation_29[0][0]
max_pooling2d_128[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 12, 12, 128) 384 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 12, 12, 128) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 12, 12, 128) 114688 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 12, 12, 128) 384 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 12, 12, 128) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 12, 12, 128) 114688 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 12, 12, 128) 384 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 12, 12, 128) 384 conv2d_166[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 12, 12, 128) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 12, 12, 128) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 12, 12, 128) 114688 activation_31[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 12, 12, 128) 114688 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 12, 12, 128) 384 conv2d_162[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 12, 12, 128) 384 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 12, 12, 128) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 12, 12, 128) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 12, 12, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 12, 12, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 12, 12, 192) 172032 activation_32[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 12, 12, 192) 172032 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 12, 12, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 12, 12, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 12, 12, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 12, 12, 192) 576 conv2d_169[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 12, 12, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 12, 12, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 12, 12, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 12, 12, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 12, 12, 768) 0 activation_30[0][0]
activation_33[0][0]
activation_38[0][0]
activation_39[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 12, 12, 160) 480 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 12, 12, 160) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 12, 12, 160) 179200 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 12, 12, 160) 480 conv2d_175[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 12, 12, 160) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 12, 12, 160) 179200 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 12, 12, 160) 480 conv2d_171[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 12, 12, 160) 480 conv2d_176[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 12, 12, 160) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 12, 12, 160) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 12, 12, 160) 179200 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 12, 12, 160) 179200 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 12, 12, 160) 480 conv2d_172[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 12, 12, 160) 480 conv2d_177[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 12, 12, 160) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 12, 12, 160) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 12, 12, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 12, 12, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 12, 12, 192) 215040 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 12, 12, 192) 215040 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 12, 12, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 12, 12, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 12, 12, 192) 576 conv2d_178[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 12, 12, 192) 576 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 12, 12, 192) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 12, 12, 192) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 12, 12, 192) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 12, 12, 192) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 12, 12, 768) 0 activation_40[0][0]
activation_43[0][0]
activation_48[0][0]
activation_49[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 12, 12, 160) 480 conv2d_184[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, 12, 12, 160) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 12, 12, 160) 179200 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 12, 12, 160) 480 conv2d_185[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, 12, 12, 160) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 12, 12, 160) 179200 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 12, 12, 160) 480 conv2d_181[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 12, 12, 160) 480 conv2d_186[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, 12, 12, 160) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, 12, 12, 160) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 12, 12, 160) 179200 activation_51[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 12, 12, 160) 179200 activation_56[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 12, 12, 160) 480 conv2d_182[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 12, 12, 160) 480 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, 12, 12, 160) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, 12, 12, 160) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 12, 12, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 12, 12, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 12, 12, 192) 215040 activation_52[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 12, 12, 192) 215040 activation_57[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 12, 12, 192) 576 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 12, 12, 192) 576 conv2d_183[0][0]
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 12, 12, 192) 576 conv2d_188[0][0]
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 12, 12, 192) 576 conv2d_189[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 12, 12, 192) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, 12, 12, 192) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, 12, 12, 192) 0 batch_normalization_188[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, 12, 12, 192) 0 batch_normalization_189[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 12, 12, 768) 0 activation_50[0][0]
activation_53[0][0]
activation_58[0][0]
activation_59[0][0]
__________________________________________________________________________________________________
conv2d_194 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 12, 12, 192) 576 conv2d_194[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, 12, 12, 192) 0 batch_normalization_194[0][0]
__________________________________________________________________________________________________
conv2d_195 (Conv2D) (None, 12, 12, 192) 258048 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 12, 12, 192) 576 conv2d_195[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, 12, 12, 192) 0 batch_normalization_195[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_196 (Conv2D) (None, 12, 12, 192) 258048 activation_65[0][0]
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 12, 12, 192) 576 conv2d_191[0][0]
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 12, 12, 192) 576 conv2d_196[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, 12, 12, 192) 0 batch_normalization_191[0][0]
__________________________________________________________________________________________________
activation_66 (Activation) (None, 12, 12, 192) 0 batch_normalization_196[0][0]
__________________________________________________________________________________________________
conv2d_192 (Conv2D) (None, 12, 12, 192) 258048 activation_61[0][0]
__________________________________________________________________________________________________
conv2d_197 (Conv2D) (None, 12, 12, 192) 258048 activation_66[0][0]
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 12, 12, 192) 576 conv2d_192[0][0]
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 12, 12, 192) 576 conv2d_197[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, 12, 12, 192) 0 batch_normalization_192[0][0]
__________________________________________________________________________________________________
activation_67 (Activation) (None, 12, 12, 192) 0 batch_normalization_197[0][0]
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 12, 12, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_193 (Conv2D) (None, 12, 12, 192) 258048 activation_62[0][0]
__________________________________________________________________________________________________
conv2d_198 (Conv2D) (None, 12, 12, 192) 258048 activation_67[0][0]
__________________________________________________________________________________________________
conv2d_199 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 12, 12, 192) 576 conv2d_190[0][0]
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 12, 12, 192) 576 conv2d_193[0][0]
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 12, 12, 192) 576 conv2d_198[0][0]
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 12, 12, 192) 576 conv2d_199[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, 12, 12, 192) 0 batch_normalization_190[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, 12, 12, 192) 0 batch_normalization_193[0][0]
__________________________________________________________________________________________________
activation_68 (Activation) (None, 12, 12, 192) 0 batch_normalization_198[0][0]
__________________________________________________________________________________________________
activation_69 (Activation) (None, 12, 12, 192) 0 batch_normalization_199[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 12, 12, 768) 0 activation_60[0][0]
activation_63[0][0]
activation_68[0][0]
activation_69[0][0]
__________________________________________________________________________________________________
conv2d_202 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 12, 12, 192) 576 conv2d_202[0][0]
__________________________________________________________________________________________________
activation_72 (Activation) (None, 12, 12, 192) 0 batch_normalization_202[0][0]
__________________________________________________________________________________________________
conv2d_203 (Conv2D) (None, 12, 12, 192) 258048 activation_72[0][0]
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 12, 12, 192) 576 conv2d_203[0][0]
__________________________________________________________________________________________________
activation_73 (Activation) (None, 12, 12, 192) 0 batch_normalization_203[0][0]
__________________________________________________________________________________________________
conv2d_200 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_204 (Conv2D) (None, 12, 12, 192) 258048 activation_73[0][0]
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 12, 12, 192) 576 conv2d_200[0][0]
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 12, 12, 192) 576 conv2d_204[0][0]
__________________________________________________________________________________________________
activation_70 (Activation) (None, 12, 12, 192) 0 batch_normalization_200[0][0]
__________________________________________________________________________________________________
activation_74 (Activation) (None, 12, 12, 192) 0 batch_normalization_204[0][0]
__________________________________________________________________________________________________
conv2d_201 (Conv2D) (None, 5, 5, 320) 552960 activation_70[0][0]
__________________________________________________________________________________________________
conv2d_205 (Conv2D) (None, 5, 5, 192) 331776 activation_74[0][0]
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 5, 5, 320) 960 conv2d_201[0][0]
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 5, 5, 192) 576 conv2d_205[0][0]
__________________________________________________________________________________________________
activation_71 (Activation) (None, 5, 5, 320) 0 batch_normalization_201[0][0]
__________________________________________________________________________________________________
activation_75 (Activation) (None, 5, 5, 192) 0 batch_normalization_205[0][0]
__________________________________________________________________________________________________
max_pooling2d_129 (MaxPooling2D (None, 5, 5, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 5, 5, 1280) 0 activation_71[0][0]
activation_75[0][0]
max_pooling2d_129[0][0]
__________________________________________________________________________________________________
conv2d_210 (Conv2D) (None, 5, 5, 448) 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 5, 5, 448) 1344 conv2d_210[0][0]
__________________________________________________________________________________________________
activation_80 (Activation) (None, 5, 5, 448) 0 batch_normalization_210[0][0]
__________________________________________________________________________________________________
conv2d_207 (Conv2D) (None, 5, 5, 384) 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_211 (Conv2D) (None, 5, 5, 384) 1548288 activation_80[0][0]
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 5, 5, 384) 1152 conv2d_207[0][0]
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 5, 5, 384) 1152 conv2d_211[0][0]
__________________________________________________________________________________________________
activation_77 (Activation) (None, 5, 5, 384) 0 batch_normalization_207[0][0]
__________________________________________________________________________________________________
activation_81 (Activation) (None, 5, 5, 384) 0 batch_normalization_211[0][0]
__________________________________________________________________________________________________
conv2d_208 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_209 (Conv2D) (None, 5, 5, 384) 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_212 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
conv2d_213 (Conv2D) (None, 5, 5, 384) 442368 activation_81[0][0]
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 5, 5, 1280) 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_206 (Conv2D) (None, 5, 5, 320) 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 5, 5, 384) 1152 conv2d_208[0][0]
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 5, 5, 384) 1152 conv2d_209[0][0]
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 5, 5, 384) 1152 conv2d_212[0][0]
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 5, 5, 384) 1152 conv2d_213[0][0]
__________________________________________________________________________________________________
conv2d_214 (Conv2D) (None, 5, 5, 192) 245760 average_pooling2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 5, 5, 320) 960 conv2d_206[0][0]
__________________________________________________________________________________________________
activation_78 (Activation) (None, 5, 5, 384) 0 batch_normalization_208[0][0]
__________________________________________________________________________________________________
activation_79 (Activation) (None, 5, 5, 384) 0 batch_normalization_209[0][0]
__________________________________________________________________________________________________
activation_82 (Activation) (None, 5, 5, 384) 0 batch_normalization_212[0][0]
__________________________________________________________________________________________________
activation_83 (Activation) (None, 5, 5, 384) 0 batch_normalization_213[0][0]
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 5, 5, 192) 576 conv2d_214[0][0]
__________________________________________________________________________________________________
activation_76 (Activation) (None, 5, 5, 320) 0 batch_normalization_206[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 5, 5, 768) 0 activation_78[0][0]
activation_79[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 5, 5, 768) 0 activation_82[0][0]
activation_83[0][0]
__________________________________________________________________________________________________
activation_84 (Activation) (None, 5, 5, 192) 0 batch_normalization_214[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, 5, 5, 2048) 0 activation_76[0][0]
mixed9_0[0][0]
concatenate[0][0]
activation_84[0][0]
__________________________________________________________________________________________________
conv2d_219 (Conv2D) (None, 5, 5, 448) 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 5, 5, 448) 1344 conv2d_219[0][0]
__________________________________________________________________________________________________
activation_89 (Activation) (None, 5, 5, 448) 0 batch_normalization_219[0][0]
__________________________________________________________________________________________________
conv2d_216 (Conv2D) (None, 5, 5, 384) 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_220 (Conv2D) (None, 5, 5, 384) 1548288 activation_89[0][0]
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 5, 5, 384) 1152 conv2d_216[0][0]
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 5, 5, 384) 1152 conv2d_220[0][0]
__________________________________________________________________________________________________
activation_86 (Activation) (None, 5, 5, 384) 0 batch_normalization_216[0][0]
__________________________________________________________________________________________________
activation_90 (Activation) (None, 5, 5, 384) 0 batch_normalization_220[0][0]
__________________________________________________________________________________________________
conv2d_217 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_218 (Conv2D) (None, 5, 5, 384) 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_221 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
conv2d_222 (Conv2D) (None, 5, 5, 384) 442368 activation_90[0][0]
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 5, 5, 2048) 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_215 (Conv2D) (None, 5, 5, 320) 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 5, 5, 384) 1152 conv2d_217[0][0]
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 5, 5, 384) 1152 conv2d_218[0][0]
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 5, 5, 384) 1152 conv2d_221[0][0]
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 5, 5, 384) 1152 conv2d_222[0][0]
__________________________________________________________________________________________________
conv2d_223 (Conv2D) (None, 5, 5, 192) 393216 average_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 5, 5, 320) 960 conv2d_215[0][0]
__________________________________________________________________________________________________
activation_87 (Activation) (None, 5, 5, 384) 0 batch_normalization_217[0][0]
__________________________________________________________________________________________________
activation_88 (Activation) (None, 5, 5, 384) 0 batch_normalization_218[0][0]
__________________________________________________________________________________________________
activation_91 (Activation) (None, 5, 5, 384) 0 batch_normalization_221[0][0]
__________________________________________________________________________________________________
activation_92 (Activation) (None, 5, 5, 384) 0 batch_normalization_222[0][0]
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 5, 5, 192) 576 conv2d_223[0][0]
__________________________________________________________________________________________________
activation_85 (Activation) (None, 5, 5, 320) 0 batch_normalization_215[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 5, 5, 768) 0 activation_87[0][0]
activation_88[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 5, 5, 768) 0 activation_91[0][0]
activation_92[0][0]
__________________________________________________________________________________________________
activation_93 (Activation) (None, 5, 5, 192) 0 batch_normalization_223[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, 5, 5, 2048) 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
__________________________________________________________________________________________________
global_average_pooling2d_26 (Gl (None, 2048) 0 mixed10[0][0]
__________________________________________________________________________________________________
dense_78 (Dense) (None, 512) 1049088 global_average_pooling2d_26[0][0]
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 512) 2048 dense_78[0][0]
__________________________________________________________________________________________________
dropout_52 (Dropout) (None, 512) 0 batch_normalization_224[0][0]
__________________________________________________________________________________________________
dense_79 (Dense) (None, 256) 131328 dropout_52[0][0]
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 256) 1024 dense_79[0][0]
==================================================================================================
Total params: 22,986,272
Trainable params: 0
Non-trainable params: 22,986,272
__________________________________________________________________________________________________
def extract_features(model, X):
    """Forward *X* through *model* and return the predicted feature vectors.

    Thin wrapper that makes the feature-extraction step read clearly at
    its call sites.
    """
    features = model.predict(X)
    return features
def hot_encode_to_index(Y):
    """Collapse a batch of one-hot rows in *Y* to integer class indices.

    The position of the maximum (the hot 1) along each row is the
    encoded class id.
    """
    return np.asarray(Y).argmax(axis=1)
# Extract CNN bottleneck features for both splits.
# NOTE(review): svm_inceptionv3_model, X_train, X_valid, y_train and y_valid
# are defined in earlier notebook cells not shown here.
train_features = extract_features(svm_inceptionv3_model, X_train)
valid_features = extract_features(svm_inceptionv3_model, X_valid)
# Pool train + validation features/labels into one labelled set; GridSearchCV
# below performs its own internal cross-validation splits over it.
svm_features = np.concatenate((train_features, valid_features))
svm_labels = np.concatenate((hot_encode_to_index(y_train), hot_encode_to_index(y_valid)))
print(svm_labels)
[105 5 90 ... 99 34 26]
display_image("rbf.png")
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

# Grid search cross-validates every combination of hyper-parameters below and
# retains the set that yields the best mean accuracy.
search_grid = {
    'kernel': ['rbf'],  # Radial basis function kernel
    # Intuitively, gamma defines how far the influence of a single training
    # example reaches: low values mean 'far', high values mean 'close'.
    'gamma': [1e-3, 1e-4],
    'C': [1, 10, 100, 1000],
}
clf = GridSearchCV(estimator=SVC(), param_grid=search_grid, verbose=10)
clf = clf.fit(svm_features / 255., svm_labels)
Fitting 5 folds for each of 8 candidates, totalling 40 fits [CV] C=1, gamma=0.001, kernel=rbf ....................................
[Parallel(n_jobs=1)]: Using backend SequentialBackend with 1 concurrent workers.
[CV] ........ C=1, gamma=0.001, kernel=rbf, score=0.013, total= 55.9s [CV] C=1, gamma=0.001, kernel=rbf ....................................
[Parallel(n_jobs=1)]: Done 1 out of 1 | elapsed: 55.8s remaining: 0.0s
[CV] ........ C=1, gamma=0.001, kernel=rbf, score=0.012, total= 57.6s [CV] C=1, gamma=0.001, kernel=rbf ....................................
[Parallel(n_jobs=1)]: Done 2 out of 2 | elapsed: 1.9min remaining: 0.0s
[CV] ........ C=1, gamma=0.001, kernel=rbf, score=0.012, total= 55.8s [CV] C=1, gamma=0.001, kernel=rbf ....................................
[Parallel(n_jobs=1)]: Done 3 out of 3 | elapsed: 2.8min remaining: 0.0s
[CV] ........ C=1, gamma=0.001, kernel=rbf, score=0.012, total= 55.6s [CV] C=1, gamma=0.001, kernel=rbf ....................................
[Parallel(n_jobs=1)]: Done 4 out of 4 | elapsed: 3.7min remaining: 0.0s
[CV] ........ C=1, gamma=0.001, kernel=rbf, score=0.012, total= 57.1s [CV] C=1, gamma=0.0001, kernel=rbf ...................................
[Parallel(n_jobs=1)]: Done 5 out of 5 | elapsed: 4.7min remaining: 0.0s
[CV] ....... C=1, gamma=0.0001, kernel=rbf, score=0.013, total= 55.0s [CV] C=1, gamma=0.0001, kernel=rbf ...................................
[Parallel(n_jobs=1)]: Done 6 out of 6 | elapsed: 5.6min remaining: 0.0s
[CV] ....... C=1, gamma=0.0001, kernel=rbf, score=0.012, total= 57.0s [CV] C=1, gamma=0.0001, kernel=rbf ...................................
[Parallel(n_jobs=1)]: Done 7 out of 7 | elapsed: 6.6min remaining: 0.0s
[CV] ....... C=1, gamma=0.0001, kernel=rbf, score=0.012, total= 54.8s [CV] C=1, gamma=0.0001, kernel=rbf ...................................
[Parallel(n_jobs=1)]: Done 8 out of 8 | elapsed: 7.5min remaining: 0.0s
[CV] ....... C=1, gamma=0.0001, kernel=rbf, score=0.012, total= 57.6s [CV] C=1, gamma=0.0001, kernel=rbf ...................................
[Parallel(n_jobs=1)]: Done 9 out of 9 | elapsed: 8.4min remaining: 0.0s
[CV] ....... C=1, gamma=0.0001, kernel=rbf, score=0.012, total= 54.8s [CV] C=10, gamma=0.001, kernel=rbf ................................... [CV] ....... C=10, gamma=0.001, kernel=rbf, score=0.013, total= 58.8s [CV] C=10, gamma=0.001, kernel=rbf ................................... [CV] ....... C=10, gamma=0.001, kernel=rbf, score=0.012, total= 56.3s [CV] C=10, gamma=0.001, kernel=rbf ................................... [CV] ....... C=10, gamma=0.001, kernel=rbf, score=0.012, total= 58.4s [CV] C=10, gamma=0.001, kernel=rbf ................................... [CV] ....... C=10, gamma=0.001, kernel=rbf, score=0.012, total= 56.6s [CV] C=10, gamma=0.001, kernel=rbf ................................... [CV] ....... C=10, gamma=0.001, kernel=rbf, score=0.012, total= 58.3s [CV] C=10, gamma=0.0001, kernel=rbf .................................. [CV] ...... C=10, gamma=0.0001, kernel=rbf, score=0.013, total= 55.3s [CV] C=10, gamma=0.0001, kernel=rbf .................................. [CV] ...... C=10, gamma=0.0001, kernel=rbf, score=0.012, total= 57.7s [CV] C=10, gamma=0.0001, kernel=rbf .................................. [CV] ...... C=10, gamma=0.0001, kernel=rbf, score=0.012, total= 55.3s [CV] C=10, gamma=0.0001, kernel=rbf .................................. [CV] ...... C=10, gamma=0.0001, kernel=rbf, score=0.012, total= 57.3s [CV] C=10, gamma=0.0001, kernel=rbf .................................. [CV] ...... C=10, gamma=0.0001, kernel=rbf, score=0.012, total= 1.1min [CV] C=100, gamma=0.001, kernel=rbf .................................. [CV] ...... C=100, gamma=0.001, kernel=rbf, score=0.015, total= 1.1min [CV] C=100, gamma=0.001, kernel=rbf .................................. [CV] ...... C=100, gamma=0.001, kernel=rbf, score=0.013, total= 1.1min [CV] C=100, gamma=0.001, kernel=rbf .................................. [CV] ...... C=100, gamma=0.001, kernel=rbf, score=0.015, total= 1.1min [CV] C=100, gamma=0.001, kernel=rbf .................................. [CV] ...... 
C=100, gamma=0.001, kernel=rbf, score=0.013, total= 59.8s [CV] C=100, gamma=0.001, kernel=rbf .................................. [CV] ...... C=100, gamma=0.001, kernel=rbf, score=0.014, total= 1.0min [CV] C=100, gamma=0.0001, kernel=rbf ................................. [CV] ..... C=100, gamma=0.0001, kernel=rbf, score=0.013, total= 1.0min [CV] C=100, gamma=0.0001, kernel=rbf ................................. [CV] ..... C=100, gamma=0.0001, kernel=rbf, score=0.012, total= 57.2s [CV] C=100, gamma=0.0001, kernel=rbf ................................. [CV] ..... C=100, gamma=0.0001, kernel=rbf, score=0.012, total= 54.1s [CV] C=100, gamma=0.0001, kernel=rbf ................................. [CV] ..... C=100, gamma=0.0001, kernel=rbf, score=0.012, total= 52.2s [CV] C=100, gamma=0.0001, kernel=rbf ................................. [CV] ..... C=100, gamma=0.0001, kernel=rbf, score=0.012, total= 53.5s [CV] C=1000, gamma=0.001, kernel=rbf ................................. [CV] ..... C=1000, gamma=0.001, kernel=rbf, score=0.035, total= 53.0s [CV] C=1000, gamma=0.001, kernel=rbf ................................. [CV] ..... C=1000, gamma=0.001, kernel=rbf, score=0.027, total= 52.5s [CV] C=1000, gamma=0.001, kernel=rbf ................................. [CV] ..... C=1000, gamma=0.001, kernel=rbf, score=0.027, total= 53.0s [CV] C=1000, gamma=0.001, kernel=rbf ................................. [CV] ..... C=1000, gamma=0.001, kernel=rbf, score=0.029, total= 54.6s [CV] C=1000, gamma=0.001, kernel=rbf ................................. [CV] ..... C=1000, gamma=0.001, kernel=rbf, score=0.028, total= 52.8s [CV] C=1000, gamma=0.0001, kernel=rbf ................................ [CV] .... C=1000, gamma=0.0001, kernel=rbf, score=0.015, total= 57.4s [CV] C=1000, gamma=0.0001, kernel=rbf ................................ [CV] .... C=1000, gamma=0.0001, kernel=rbf, score=0.013, total= 55.2s [CV] C=1000, gamma=0.0001, kernel=rbf ................................ [CV] .... 
C=1000, gamma=0.0001, kernel=rbf, score=0.015, total= 56.2s [CV] C=1000, gamma=0.0001, kernel=rbf ................................ [CV] .... C=1000, gamma=0.0001, kernel=rbf, score=0.013, total= 54.7s [CV] C=1000, gamma=0.0001, kernel=rbf ................................ [CV] .... C=1000, gamma=0.0001, kernel=rbf, score=0.014, total= 56.1s
[Parallel(n_jobs=1)]: Done 40 out of 40 | elapsed: 38.1min finished
import pickle

# Persist the fitted grid-search object so the (~38 min) fit above never has
# to be repeated.
model_path = join(data_dir, 'saved_models', 'svm_model.pkl')
with open(model_path, 'wb') as fh:
    pickle.dump(clf, fh)
def calculate_confusion_matrix_svm_train_test():
    """Reload the pickled SVM, score it on the validation split, and return
    the confusion matrix, the 10 "best" correct predictions, the 10 "worst"
    wrong predictions, and the overall accuracy fraction.
    """
    # Deserialize the GridSearchCV object saved by the previous cell.
    with open(join(data_dir, 'saved_models', 'svm_model.pkl'),'rb') as file:
        clf2 = pickle.load(file)
    # Apply the same 1/255 scaling that was used when fitting the SVM.
    y_pred = clf2.predict(valid_features / 255.)
    y_true = hot_encode_to_index(y_valid)
    # NOTE(review): SVC.predict returns class labels, not scores, so
    # np.amax(y_pred[i]) is just the predicted class id — 'perc' is NOT a
    # confidence value here, and the "best"/"worst" orderings below actually
    # sort by class index. Confirm whether decision_function / predict_proba
    # was intended.
    y_pred_labeled = [{'pred_label': index_to_label[pred], 'true_label': index_to_label[y_true[i]],
    'filename': valid_filenames[i], 'perc': np.amax(y_pred[i])} for i, pred in enumerate(y_pred)]
    correct_pred = list(filter(lambda pred: pred['pred_label'] == pred['true_label'], y_pred_labeled))
    worst_wrong_pred = sorted(list(filter(lambda pred: pred['pred_label'] != pred['true_label'], y_pred_labeled)), key=lambda e: e['perc'], reverse=True)[:10]
    # Fraction of validation images classified correctly.
    correct_perc = len(correct_pred) / len(valid_filenames)
    return {'confusion': confusion_matrix(y_true, y_pred), 'correct': sorted(correct_pred, key=lambda e: e['perc'], reverse=True)[:10],
    'wrong': worst_wrong_pred, 'correct_perc': correct_perc}
# Evaluate the SVM classifier and visualise the results.
# NOTE(review): plot_confusion_matrix, read_and_plot_correct_wrong, labels and
# index_to_label come from earlier notebook cells not shown here.
svm_cnn_metrics = calculate_confusion_matrix_svm_train_test()
plot_confusion_matrix(svm_cnn_metrics['confusion'], labels=labels, title="Confusion Matrix InceptionV3 based CNN Feature Extractor with SVM Classifier") # Tune hyperparameters SVM fit
read_and_plot_correct_wrong(correct=svm_cnn_metrics['correct'], wrong=svm_cnn_metrics['wrong'])
print('Correct percentage: {}%'.format(round(svm_cnn_metrics['correct_perc'] * 100, 2)))
Correct percentage: 3.23%
display_image("results.png")
The full results can be found in the attached PDF file.